Commit 4d4a1c6
Parent: 55eeb5e

adding the finance data scrape from yfinance
backend/tasks/__pycache__/data_tasks.cpython-311.pyc
ADDED
Binary file (2.4 kB)
backend/tasks/data_tasks.py
ADDED
@@ -0,0 +1,43 @@
+from celery_worker import celery
+from core.database import SessionLocal
+from models.analysis_job import AnalysisJob
+from tools.data_tools import get_stock_data
+from uuid import UUID
+
+@celery.task
+def run_data_analysis(job_id: str, ticker: str):
+    db = SessionLocal()
+    job = None
+    try:
+        job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
+        if not job:
+            print(f"Job {job_id} not found in database.")
+            return
+
+        print(f"Starting data analysis for job {job_id}, ticker {ticker}...")
+        job.status = "RUNNING"
+        db.commit()
+
+        data = get_stock_data(ticker)
+
+        job.result = data
+        job.status = "SUCCESS"
+        db.commit()
+        print(f"Data analysis for job {job_id} completed successfully.")
+
+        # get the result BEFORE closing the session
+        final_result = str(job.result)
+
+    except Exception as e:
+        print(f"Error during data analysis for job {job_id}: {e}")
+        if job:
+            job.status = "FAILED"
+            job.result = {"error": str(e)}
+            db.commit()
+        final_result = f"Error: {e}"
+    finally:
+        # always close the database session
+        db.close()
+
+    # return the variable that is no longer attached to the session
+    return final_result
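
The task calls get_stock_data from tools.data_tools, which is not part of this diff. Going only by the commit message ("finance data scrape from yfinance"), here is a minimal sketch of what that helper might look like; the field names and return shape are assumptions, not the actual contents of tools/data_tools.py.

# Hypothetical sketch of tools/data_tools.py (not included in this commit).
# Assumes the helper returns a JSON-serialisable dict that can be stored in job.result.
import yfinance as yf


def get_stock_data(ticker: str) -> dict:
    """Fetch basic price history and metadata for a ticker via yfinance."""
    stock = yf.Ticker(ticker)

    # One month of daily OHLCV data as a pandas DataFrame.
    history = stock.history(period="1mo")

    return {
        "ticker": ticker,
        # stock.info is a plain dict of metadata provided by yfinance.
        "name": stock.info.get("longName"),
        "currency": stock.info.get("currency"),
        # Serialise closing prices keyed by date so the result stores cleanly as JSON.
        "closes": {str(idx.date()): float(row["Close"]) for idx, row in history.iterrows()},
    }

On the calling side, the task would presumably be queued with something like run_data_analysis.delay(str(job.id), ticker) after the AnalysisJob row is created, but that code is also outside this commit.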