halme committed on
Commit d9b3cda · verified · 1 Parent(s): e78f32a

Delete DataLoader.py

Files changed (1):
  1. DataLoader.py +0 -108
DataLoader.py DELETED
@@ -1,108 +0,0 @@
- # Class to fetch news and stock data from the web for a specific ticker and combine them into a dataframe.
-
- import pandas as pd
- import yfinance as yf
- from pygooglenews import GoogleNews
- from transformers import AutoTokenizer, AutoModelForSequenceClassification
- from transformers import pipeline
- class DataLoader:
-     def __init__(self, ticker, time_period_news, time_period_stock, news_decay_rate=0):
-         self.ticker = ticker
-         self.time_period_news = time_period_news
-         self.time_period_stock = time_period_stock
-         self.news_decay_rate = news_decay_rate
-
-     def get_data(self):
-         # Fetch prices and news, score the headlines, then merge everything on the date index.
-         stock_data = self.get_stock_data()
-         news_data = self.get_news_data()
-         news_sentiment = self.get_sentiment(news_data)
-         combined_data = self.combine_data(stock_data, news_sentiment)
-
-         if self.news_decay_rate != 0:
-             combined_data = self.news_decay(combined_data, self.news_decay_rate)
-
-         return combined_data
-
-
-     def get_stock_data(self):
-         # Download OHLCV price data for the ticker over the configured period.
-         data = yf.download(self.ticker, period=self.time_period_stock)
-         df = pd.DataFrame()
-         df['Open'] = data['Open']
-         df['Close'] = data['Close']
-         df['High'] = data['High']
-         df['Low'] = data['Low']
-         df['Volume'] = data['Volume']
-
-         return df
-
-     def get_news_data(self):
-         # Search Google News for the ticker within the configured time window.
-         googlenews = GoogleNews()
-         news_data = googlenews.search(self.ticker, when=self.time_period_news)
-         news_data = pd.DataFrame(news_data['entries'])
-         return news_data
-
-     def get_sentiment(self, news_data):
-         # Score each headline with FinBERT; one row per article with the three class scores.
-         tokenizer = AutoTokenizer.from_pretrained("ProsusAI/finbert")
-         model = AutoModelForSequenceClassification.from_pretrained("ProsusAI/finbert")
-         classifier = pipeline('sentiment-analysis', model=model, tokenizer=tokenizer)
-
-         news_sentiment = []
-         for i in range(len(news_data)):
-             sentiment = classifier(news_data['title'][i], top_k=None)
-             # Key the scores by label rather than position: with top_k=None the pipeline
-             # returns the classes sorted by score, not in a fixed order.
-             scores = {entry['label']: entry['score'] for entry in sentiment}
-             positive_score = scores['positive']
-             negative_score = scores['negative']
-             neutral_score = scores['neutral']
-             reformatted_time_stamp = pd.to_datetime(news_data['published'][i]).date()
-             news_sentiment.append({'Date': reformatted_time_stamp, 'positive_score': positive_score, 'negative_score': negative_score, 'neutral_score': neutral_score})
-         return pd.DataFrame(news_sentiment)
-
-     def combine_data(self, stock_data, news_sentiment):
-         # Average the sentiment scores per day and sort by date.
-         news_sentiment = (
-             news_sentiment
-             .groupby('Date')
-             .mean()
-             .fillna(0)
-             .reset_index()
-             .set_index('Date')
-             .sort_index()
-         )
-
-         # Build one daily index spanning both the price history and the news history.
-         common_index = pd.date_range(
-             start=min(pd.Timestamp(stock_data.index[0]), pd.Timestamp(news_sentiment.index[0])),
-             end=max(pd.Timestamp(stock_data.index[-1]), pd.Timestamp(news_sentiment.index[-1])),
-             freq='D'
-         )
-         stock_data = stock_data.reindex(common_index).fillna(-1)
-
-         news_sentiment = news_sentiment.reindex(common_index).fillna(0)
-
-         # Ensure stock_data and news_sentiment have compatible indices
-         stock_data.index = pd.to_datetime(stock_data.index).normalize()
-         news_sentiment.index = pd.to_datetime(news_sentiment.index).normalize()
-
-         combined_data = pd.merge(
-             stock_data,
-             news_sentiment,
-             how='left',
-             left_index=True,
-             right_index=True
-         )
-
-         # Drop rows whose Close is -1 (non-trading days introduced by the reindex)
-         combined_data = combined_data[combined_data['Close'] != -1]
-         # Fill all remaining missing values with 0
-         combined_data = combined_data.fillna(0)
-
-         return combined_data
-
-     def news_decay(self, combined_data, decay_rate):
-         # Many days have no news. Fill those days with the previous day's scores * decay_rate
-         # so the news signal decays smoothly instead of dropping straight to zero.
-         combined_data = combined_data.copy()
-         news_columns = ['positive_score', 'negative_score', 'neutral_score']
-         # Walk from the oldest date towards the newest date
-         for i in range(1, len(combined_data)):
-             for column in news_columns:
-                 if combined_data[column].iloc[i] == 0 and combined_data[column].iloc[i-1] != 0:
-                     combined_data.loc[combined_data.index[i], column] = combined_data[column].iloc[i-1] * decay_rate
-         return combined_data
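
For reference, the sketch below shows how the deleted class was presumably driven before this commit removed it. It is a minimal, hypothetical example: the ticker, period strings, and decay rate are illustrative assumptions, not values taken from this repository.

# Hypothetical usage of the deleted DataLoader class; all argument values below are assumptions.
from DataLoader import DataLoader

loader = DataLoader(
    ticker="AAPL",            # assumed example ticker
    time_period_news="7d",    # window passed to pygooglenews' `when` argument
    time_period_stock="1mo",  # period passed to yfinance's `period` argument
    news_decay_rate=0.9,      # carry the previous day's sentiment forward at 90%
)

df = loader.get_data()
# One row per trading day: OHLCV columns plus the averaged FinBERT scores.
print(df[['Close', 'positive_score', 'negative_score', 'neutral_score']].tail())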