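# Celery task for fetching stock data for a given ticker.
#
# The two commented-out blocks below appear to be earlier revisions of this
# task that also persisted job status and results to the database via
# AnalysisJob; the active implementation is get_data_task at the bottom of
# the file.
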
# from celery_worker import celery
# from core.database import SessionLocal
# from models.analysis_job import AnalysisJob
# from tools.data_tools import get_stock_data
# from uuid import UUID

# @celery.task
# def run_data_analysis(job_id: str, ticker: str):
#     db = SessionLocal()
#     job = None
#     final_result = ""
#     try:
#         job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
#         if not job:
#             raise ValueError(f"Job {job_id} not found in database.")
#         print(f"Status - DATA_FETCHING for job {job_id}...")
#         job.status = "DATA_FETCHING"
#         db.commit()
#         data = get_stock_data(ticker)
#         if "error" in data:
#             raise ValueError(data["error"])
#         job.result = data
#         db.commit()
#         print(f"Data analysis for job {job_id} completed successfully.")
#         final_result = str(job.result)
#     except Exception as e:
#         print(f"Error during data analysis for job {job_id}: {e}")
#         if job:
#             job.status = "FAILED"
#             job.result = {"error": f"Data analysis failed: {str(e)}"}
#             db.commit()
#         final_result = f"Error: {e}"
#     finally:
#         db.close()
#     return final_result

# from celery_worker import celery
# from core.database import SessionLocal
# from models.analysis_job import AnalysisJob
# from tools.data_tools import get_stock_data
# from uuid import UUID

# @celery.task
# def run_data_analysis(job_id: str, ticker: str):
#     with SessionLocal() as db:
#         job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
#         if not job:
#             print(f"Job {job_id} not found.")
#             return
#         try:
#             job.status = "DATA_FETCHING"
#             db.commit()
#             data = get_stock_data(ticker)
#             if "error" in data:
#                 raise ValueError(data["error"])
#             job.result = data
#             db.commit()
#             print(f"Data analysis for job {job_id} completed successfully.")
#             return "Data fetching successful."
#         except Exception as e:
#             print(f"Error during data analysis for job {job_id}: {e}")
#             job.status = "FAILED"
#             job.result = {"error": f"Data analysis failed: {str(e)}"}
#             db.commit()
#             return f"Data fetching failed: {e}"

from celery_worker import celery
from tools.data_tools import get_stock_data
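
# Active implementation: fetch the data and return it directly as the task
# result, without the job-status bookkeeping used in the earlier versions above.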
@celery.task
def get_data_task(ticker: str):
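    """Fetch stock data for `ticker` and return it as the task result."""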
print(f"Executing get_data_task for {ticker}...")
return get_stock_data(ticker) |
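

# Example usage (illustrative; assumes a Celery broker and worker are running,
# and "AAPL" is only a placeholder ticker):
#
#     result = get_data_task.delay("AAPL")  # returns a celery AsyncResult
#     data = result.get(timeout=30)         # block until the worker returns the data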