gusti-adli committed
Commit 05a567c · 2 Parent(s): a59e1e1 ddbb8eb

Merge branch 'main' of https://huggingface.co/spaces/gstdl/screener-saham-demo
app/dataset/Klasifikasi Industri Perusahaan Tercatat.pdf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16d31e3ed82592828347ad047cea086300dc43b039518bfa1c1fd95069b7f12d
+ size 1378039
app/dataset/patterns.json ADDED
@@ -0,0 +1,63 @@
+ {
+     "CDL2CROWS":"Two Crows",
+     "CDL3BLACKCROWS":"Three Black Crows",
+     "CDL3INSIDE":"Three Inside Up/Down",
+     "CDL3LINESTRIKE":"Three-Line Strike",
+     "CDL3OUTSIDE":"Three Outside Up/Down",
+     "CDL3STARSINSOUTH":"Three Stars In The South",
+     "CDL3WHITESOLDIERS":"Three Advancing White Soldiers",
+     "CDLABANDONEDBABY":"Abandoned Baby",
+     "CDLADVANCEBLOCK":"Advance Block",
+     "CDLBELTHOLD":"Belt-hold",
+     "CDLBREAKAWAY":"Breakaway",
+     "CDLCLOSINGMARUBOZU":"Closing Marubozu",
+     "CDLCONCEALBABYSWALL":"Concealing Baby Swallow",
+     "CDLCOUNTERATTACK":"Counterattack",
+     "CDLDARKCLOUDCOVER":"Dark Cloud Cover",
+     "CDLDOJI":"Doji",
+     "CDLDOJISTAR":"Doji Star",
+     "CDLDRAGONFLYDOJI":"Dragonfly Doji",
+     "CDLENGULFING":"Engulfing Pattern",
+     "CDLEVENINGDOJISTAR":"Evening Doji Star",
+     "CDLEVENINGSTAR":"Evening Star",
+     "CDLGAPSIDESIDEWHITE":"Up/Down-gap side-by-side white lines",
+     "CDLGRAVESTONEDOJI":"Gravestone Doji",
+     "CDLHAMMER":"Hammer",
+     "CDLHANGINGMAN":"Hanging Man",
+     "CDLHARAMI":"Harami Pattern",
+     "CDLHARAMICROSS":"Harami Cross Pattern",
+     "CDLHIGHWAVE":"High-Wave Candle",
+     "CDLHIKKAKE":"Hikkake Pattern",
+     "CDLHIKKAKEMOD":"Modified Hikkake Pattern",
+     "CDLHOMINGPIGEON":"Homing Pigeon",
+     "CDLIDENTICAL3CROWS":"Identical Three Crows",
+     "CDLINNECK":"In-Neck Pattern",
+     "CDLINVERTEDHAMMER":"Inverted Hammer",
+     "CDLKICKING":"Kicking",
+     "CDLKICKINGBYLENGTH":"Kicking - bull/bear determined by the longer marubozu",
+     "CDLLADDERBOTTOM":"Ladder Bottom",
+     "CDLLONGLEGGEDDOJI":"Long Legged Doji",
+     "CDLLONGLINE":"Long Line Candle",
+     "CDLMARUBOZU":"Marubozu",
+     "CDLMATCHINGLOW":"Matching Low",
+     "CDLMATHOLD":"Mat Hold",
+     "CDLMORNINGDOJISTAR":"Morning Doji Star",
+     "CDLMORNINGSTAR":"Morning Star",
+     "CDLONNECK":"On-Neck Pattern",
+     "CDLPIERCING":"Piercing Pattern",
+     "CDLRICKSHAWMAN":"Rickshaw Man",
+     "CDLRISEFALL3METHODS":"Rising/Falling Three Methods",
+     "CDLSEPARATINGLINES":"Separating Lines",
+     "CDLSHOOTINGSTAR":"Shooting Star",
+     "CDLSHORTLINE":"Short Line Candle",
+     "CDLSPINNINGTOP":"Spinning Top",
+     "CDLSTALLEDPATTERN":"Stalled Pattern",
+     "CDLSTICKSANDWICH":"Stick Sandwich",
+     "CDLTAKURI":"Takuri (Dragonfly Doji with very long lower shadow)",
+     "CDLTASUKIGAP":"Tasuki Gap",
+     "CDLTHRUSTING":"Thrusting Pattern",
+     "CDLTRISTAR":"Tristar Pattern",
+     "CDLUNIQUE3RIVER":"Unique 3 River",
+     "CDLUPSIDEGAP2CROWS":"Upside Gap Two Crows",
+     "CDLXSIDEGAP3METHODS":"Upside/Downside Gap Three Methods"
+ }
app/dataset/pull_data.py ADDED
@@ -0,0 +1,140 @@
+ import tabula
+ import yfinance as yfi
+ import sqlite3
+ import pandas as pd
+ import json
+ import talib
+ import time
+ import datetime
+ import warnings
+
+ warnings.filterwarnings("ignore")
+
+ with open("patterns.json", "r") as f:
+     patterns = json.load(f)
+
+ # update_time = datetime.datetime.now()
+ # dummy update time
+ update_time = "2021-05-01 15:20:03.672744"
+
+ def find_patterns(df):
+     result = pd.DataFrame(
+         columns=[
+             "Date",
+             "Kode",
+             "Pattern",
+             "Pattern_Score",
+             "Open_Close_Change",
+             "High_Low_Change",
+         ]
+     )
+     for attr, pattern in patterns.items():
+         scores = getattr(talib, attr)(df["Open"], df["High"], df["Low"], df["Close"])
+         mask = scores != 0
+         temp_result = df[mask]
+         if len(temp_result) > 0:
+             temp_result = temp_result.assign(
+                 Open_Close_Change=(temp_result["Close"] - temp_result["Open"]) / temp_result["Open"],
+                 High_Low_Change=(temp_result["High"] - temp_result["Low"]) / temp_result["Low"],
+                 Pattern=[pattern] * len(temp_result),
+                 Pattern_Score=scores[mask].values,
+             )[result.columns]
+             result = result.append(temp_result)
+     result = result.assign(time_updated = update_time)
+     return result
+
+
+ def pull_data_yfi():
+     start = time.time()
+     with sqlite3.connect("ihsg.db") as con:
+         tickers = pd.read_sql(
+             """
+             SELECT Kode FROM list_perusahaan
+             WHERE Kode != "IHSG"
+             """,
+             con=con,
+         ).values.flatten()
+         ihsg = (
+             yfi.download("^JKSE", start="2015-01-01", end="2021-05-01", progress=False)
+             .reset_index()
+             .dropna()
+             .assign(Kode="IHSG")
+         )
+         ihsg = ihsg[["Date", "Kode", "Open", "High", "Low", "Close", "Volume"]]
+         ihsg = ihsg.assign(time_updated = update_time)
+         ihsg.to_sql("historical", if_exists="replace", con=con, index=False)
+         pattern_search = find_patterns(ihsg)
+         pattern_search.to_sql("patterns", if_exists="replace", con=con, index=False)
+         for i in range(0, len(tickers), 50):
+             ticker = [f"{kode}.JK" for kode in tickers[i : i + 50]]
+             df = (
+                 yfi.download(ticker, start="2015-01-01", end="2021-05-01", progress=False)
+                 .T.unstack(level=1)
+                 .T.reset_index()
+                 .dropna()
+                 .rename(columns={"level_1": "Kode"})
+             )
+             df = df[["Date", "Kode", "Open", "High", "Low", "Close", "Volume"]]
+             df["Kode"] = df["Kode"].str.replace(".JK", "")
+             for j, kode in enumerate(df["Kode"].unique()):
+                 print(f"Finding Patterns for {kode} #{i+j+1}\t\t time elapsed = {time.time() - start:.2f} s")
+                 pattern_search = find_patterns(df[df["Kode"] == kode])
+                 pattern_search.to_sql("patterns", if_exists="append", con=con, index=False)
+             df = df.assign(time_updated = update_time)
+             df.to_sql("historical", if_exists="append", con=con, index=False)
+
+ def pull_data_klasifikasi_industri():
+     with sqlite3.connect("ihsg.db") as con:
+         cur = con.cursor()
+         cur.execute("DROP TABLE IF EXISTS list_perusahaan")
+         cur.execute("""
+             CREATE TABLE list_perusahaan (
+                 Kode VARCHAR(4),
+                 Nama TEXT,
+                 Sektor TEXT,
+                 Instrumen TEXT)
+         """)
+         cur.execute("""
+             INSERT INTO list_perusahaan VALUES
+             ('IHSG', 'Indeks Harga Saham Gabungan', NULL, 'Indeks')
+         """)
+         # TODO: Change Schema from Star Schema to Snowflake Schema
+         # list_perusahaan table will be the dimension table for sector and sub-sector fact tables
+         # note: list_perusahaan table is a dimension table for historical fact table
+
+         dfs = tabula.read_pdf("Klasifikasi Industri Perusahaan Tercatat.pdf", pages="all", stream=True)
+         # print(len(dfs))
+         for df in dfs:
+             kode, nama, sektor = None, None, None
+             for row in df.iloc[2:,:].itertuples():
+                 if kode is not None and pd.notna(row[2]):
+                     cur.execute(f"""
+                         INSERT INTO list_perusahaan VALUES
+                         ('{kode}', '{nama}', '{sektor}', 'Saham')
+                     """)
+                     kode, nama, sektor = None, None, None
+                 elif kode is not None and pd.isna(row[2]):
+                     if pd.notna(row[3]):
+                         nama += " " + row[3]
+                     if pd.notna(row[5]):
+                         sektor += " " + row[5]
+                 if kode is None and nama is None and sektor is None and pd.notna(row[2]):
+                     if "saham" in row[8].lower():
+                         kode = row[2]
+                         nama = row[3]
+                         sektor = row[5]
+             else:
+                 if kode is not None:
+                     cur.execute(f"""
+                         INSERT INTO list_perusahaan VALUES
+                         ('{kode}', '{nama}', '{sektor}', 'Saham')
+                     """)
+             print("INSERTION RESULT: \n")
+             print(pd.read_sql("SELECT * FROM list_perusahaan", con=con).tail(2))
+             print(pd.read_sql("SELECT * FROM list_perusahaan", con=con).shape)
+             print("\n\n*--\n")
+         con.commit()
+
+ if __name__ == "__main__":
+     pull_data_klasifikasi_industri()
+     pull_data_yfi()
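
A side note on pull_data_klasifikasi_industri above: company names are interpolated into the INSERT statements with f-strings, so a Nama containing a single quote would break the SQL. A minimal sketch of the same insert using sqlite3 parameter binding instead (insert_saham is a hypothetical helper, not part of the committed script):

import sqlite3

def insert_saham(cur: sqlite3.Cursor, kode: str, nama: str, sektor: str) -> None:
    # "?" placeholders let the sqlite3 driver quote/escape the values itself.
    cur.execute(
        "INSERT INTO list_perusahaan (Kode, Nama, Sektor, Instrumen) VALUES (?, ?, ?, ?)",
        (kode, nama, sektor, "Saham"),
    )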
app/helper_script.py ADDED
@@ -0,0 +1,176 @@
+ from bokeh.plotting import figure
+ from bokeh.models import ColumnDataSource, HoverTool, Arrow, NormalHead
+ from bokeh.palettes import Spectral4
+ from bokeh.embed import components
+ import sqlite3
+ import pandas as pd
+
+
+ def get_tickers(pattern, last_dates=1):
+     # connect to database
+     with sqlite3.connect("dataset/ihsg.db") as con:
+
+         # retrieve data from database
+         tickers = pd.read_sql(f"""
+             SELECT Kode
+             FROM patterns
+             WHERE Date IN (
+                 SELECT Date
+                 FROM (
+                     SELECT Date, ROW_NUMBER() OVER(ORDER BY Date DESC) AS rnk
+                     FROM historical
+                     WHERE Kode = 'IHSG'
+                 ) a
+                 WHERE rnk <= {last_dates + 1}
+             )
+             AND Pattern = '{pattern}'
+             ORDER BY Pattern_Score DESC, Open_Close_Change DESC, High_Low_Change DESC
+             """,
+             con=con,
+         ).iloc[:, 0].to_list()
+
+     return tickers
+
+ def get_data(kode, pattern):
+
+     # connect to database
+     with sqlite3.connect("dataset/ihsg.db") as con:
+
+         # retrieve data from database
+         df = pd.read_sql(f"""
+             SELECT *
+             FROM historical
+             WHERE Kode = '{kode}'
+             ORDER BY Date
+             """,
+             con=con,
+             parse_dates=['Date'],
+         )
+
+         # df = pd.read_sql(f"""
+         #     SELECT
+         #         historical.Date,
+         #         historical.Open,
+         #         historical.High,
+         #         historical.Low,
+         #         historical.Close,
+         #         patterns.Pattern_Score
+         #     FROM historical
+         #     LEFT JOIN (
+         #         SELECT Date, Kode, Pattern_Score
+         #         FROM patterns
+         #         WHERE Pattern = '{pattern}'
+         #     ) AS patterns
+         #     USING(Kode, Date)
+         #     WHERE Kode = '{kode}'
+         #     ORDER BY Date
+         #     """,
+         #     con=con,
+         #     parse_dates=['Date'],
+         # )
+
+         nama = pd.read_sql(
+             f"SELECT Nama FROM list_perusahaan WHERE Kode = '{kode}'",
+             con=con,
+         ).values[0][0]
+
+     return df, nama
+
+ def plot_candlestick(df, nama, kode):
+
+     # calculate simple moving average
+     for period in [5,20,200]:
+         df[f'sma{period}'] = df['Close'].rolling(period, period).mean()
+
+     # Prepare data for plotting
+     cds = ColumnDataSource(df)
+     cds_inc = ColumnDataSource(df[df["Close"] >= df["Open"]])
+     cds_dec = ColumnDataSource(df[df["Open"] > df["Close"]])
+
+     # assign figure canvas to variable p
+     x_range = (max(len(df) - 60.5, 0), len(df))
+     p = figure(
+         tools="pan,zoom_in,zoom_out,box_zoom,undo,redo,reset,save",
+         plot_width=600,
+         plot_height=400,
+         title = f"{kode}\t({nama})",
+         x_range= x_range,
+         y_range= (
+             df.loc[x_range[0]//1-5:x_range[1], ["Open", "High", "Low", "Close", "sma5", "sma20", "sma200"]].min().min() * 0.875,
+             df.loc[x_range[0]//1-5:x_range[1], ["Open", "High", "Low", "Close", "sma5", "sma20", "sma200"]].max().max() * 1.125
+         )
+     )
+
+     # xaxis setup
+     p.xaxis.major_label_overrides = {
+         i: date.strftime('%d %b %Y') for i, date in enumerate(df["Date"])
+     }
+     p.xaxis.bounds = (0, df.index[-1])
+     p.xaxis.major_label_orientation = (22/7)/4
+     p.grid.grid_line_alpha=0.3
+
+     # # plot pattern arrow
+     # for idx in df[df["Pattern_Score"].notna()].tail().index:
+     #     row = df.loc[idx, ["Open", "High", "Low", "Close"]]
+     #     x_start = row.min()
+     #     if x_start < 200:
+     #         x_start -= 2
+     #         x_end = x_start - 4
+     #     elif x_start < 500:
+     #         x_start -= 4
+     #         x_end = x_start - 4
+     #     else:
+     #         x_start -= 8
+     #         x_end = x_start - 6
+     #     p.add_layout(Arrow(
+     #         end=NormalHead(fill_color="black"),
+     #         line_color="black",
+     #         x_start = x_start,
+     #         x_end = x_end,
+     #         y_start = idx,
+     #         y_end=idx
+     #     ))
+
+
+     # plot candlestick wicks with HoverTool
+     p.add_tools(HoverTool(
+         renderers=[p.segment("index", "High", "index", "Low", source=cds, color="black", line_width=1)],
+         tooltips=[
+             ("Date","@Date{%F}"),
+             ("Open","@Open{0.2f}"),
+             ("High", "@High{0.2f}"),
+             ("Low", "@Low{0.2f}"),
+             ("Close", "@Close{0.2f}"),
+         ],
+         formatters={"@Date":"datetime"}
+     ))
+
+     # plot candlestick bars
+     for data, color in [(cds_inc, "#26a69a"), (cds_dec, "#ef5350")]:
+         p.vbar("index", 0.5, "Open", "Close", source=data, fill_color=color, line_color="black", line_width=1)
+
+     # plot moving average with HoverTool
+     for period, color in zip([5,20,200], Spectral4):
+         p.add_tools(HoverTool(
+             renderers=[p.line(
+                 "index",
+                 f"sma{period}",
+                 source=cds,
+                 line_width=2,
+                 alpha=0.8,
+                 color=color,
+                 legend_label=f'SMA {period}\t')],
+             tooltips=[
+                 (f"SMA {period}", "@sma%s{0.2f}" %(period)),
+             ],
+         ))
+
+     # legend setup
+     p.legend.location = "top_left"
+     p.legend.click_policy="hide"
+     p.legend.orientation="horizontal"
+
+     # generate script and div
+     script, div = components(p)
+
+     return script, div
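
For context, plot_candlestick returns the script/div pair that plot.html (added below) consumes as plot_scripts/plot_divs alongside js_resources/css_resources. The Flask app itself is not part of this diff; a minimal sketch of how these pieces would be wired together (the route, query-parameter name, and top-5 cap are assumptions):

import json

from bokeh.resources import CDN
from flask import Flask, render_template, request

from helper_script import get_data, get_tickers, plot_candlestick
from update_database import get_last_update_time

app = Flask(__name__)

with open("dataset/patterns.json") as f:
    patterns = json.load(f)  # TA-Lib function name -> display name stored in the DB


@app.route("/")
def screener():
    # assumption: the form submits the TA-Lib key (e.g. "CDLDOJI") as "pattern"
    key = request.args.get("pattern", "CDLDOJI")
    pattern = patterns[key]
    scripts, divs = [], []
    for kode in get_tickers(pattern)[:5]:  # plot only the top few matches
        df, nama = get_data(kode, pattern)
        script, div = plot_candlestick(df, nama, kode)
        scripts.append(script)
        divs.append(div)
    return render_template(
        "plot.html",
        patterns=patterns,
        selected=key,
        last_update_time=get_last_update_time(),
        js_resources=CDN.render_js(),   # Bokeh JS/CSS expected by plot.html's head block
        css_resources=CDN.render_css(),
        plot_scripts=scripts,
        plot_divs=divs,
    )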
app/requirements.txt ADDED
@@ -0,0 +1,6 @@
+ Flask==1.1.1
+ gunicorn==20.1.0
+ pandas==1.2.5
+ yfinance==0.1.59
+ TA-Lib==0.4.20
+ bokeh==2.3.2
app/templates/index.html ADDED
@@ -0,0 +1,52 @@
+ <html lang="en">
+     <head>
+         <meta charset="UTF-8">
+         <meta http-equiv="X-UA-Compatible" content="IE=edge">
+         <meta name="viewport" content="width=device-width, initial-scale=1.0">
+         <link rel="stylesheet" href="./assets/css/styles.css" type="text/css">
+         <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
+         <script type="text/javascript">
+             window.onload = function() {
+                 $("#loading").hide();
+             };
+             function loading(){
+                 $("#content").hide();
+                 $("#loading").show();
+             }
+         </script>
+         {% block head %}{% endblock %}
+         <title>Candlestick Screener</title>
+     </head>
+     <body>
+         <div id="loading">
+             <!-- <img src="/assets/img/loading.gif"> -->
+             <img src="https://cdn.dribbble.com/users/115601/screenshots/5356365/loading.gif">
+         </div>
+         <div id="content">
+             <h1>Candlestick Screener</h1>
+             <form>
+                 <label for="pattern">Find pattern:</label>
+                 <select name="pattern">
+                     {% if not selected %}
+                     <option value="" selected disabled hidden></option>
+                     {% endif %}
+                     {% for pattern in patterns %}
+                     {% if selected == pattern %}
+                     <option value={{ pattern }} selected>{{ patterns[pattern] }}</option>
+                     {% else %}
+                     <option value={{ pattern }}>{{ patterns[pattern] }}</option>
+                     {% endif %}
+                     {% endfor %}
+                 </select>
+                 <input type="submit" value="search" onclick="loading();"/>
+             </form>
+             <form method="POST">
+                 Database updated at: {{ last_update_time }}
+                 <button type="submit" value="Update Database" onclick="loading();">
+                     Update Database
+                 </button>
+             </form>
+             {% block contents %}{% endblock %}
+         </div>
+     </body>
+ </html>
app/templates/no_pattern_found.html ADDED
@@ -0,0 +1,7 @@
+ {% extends "index.html" %}
+
+ {% block contents %}
+
+ <h3>Candlestick pattern is not present in the last candlesticks for all stocks</h3>
+
+ {% endblock %}
app/templates/plot.html ADDED
@@ -0,0 +1,15 @@
+ {% extends "index.html" %}
+
+ {% block head %}
+ {{ js_resources|indent(4)|safe }}
+ {{ css_resources|indent(4)|safe }}
+ {% for plot_script in plot_scripts %}
+ {{ plot_script|indent(4)|safe }}
+ {% endfor %}
+ {% endblock %}
+
+ {% block contents %}
+ {% for plot_div in plot_divs %}
+ {{ plot_div | safe }}
+ {% endfor %}
+ {% endblock %}
app/update_database.py ADDED
@@ -0,0 +1,116 @@
+ import pandas as pd
+ import sqlite3
+ import json
+ import datetime
+ import time
+ import yfinance as yfi
+ import talib
+ import warnings
+ warnings.filterwarnings("ignore")
+
+ # retrieve pattern data
+ with open("dataset/patterns.json") as f:
+     patterns = json.load(f)
+
+
+ def get_last_update_time():
+     with sqlite3.connect("dataset/ihsg.db") as con:
+         return pd.read_sql("SELECT MAX(time_updated) FROM historical", con=con).values[0][0][:19]
+
+ def find_patterns(df):
+     result = pd.DataFrame(
+         columns=[
+             "Date",
+             "Kode",
+             "Pattern",
+             "Pattern_Score",
+             "Open_Close_Change",
+             "High_Low_Change",
+         ]
+     )
+     for attr, pattern in patterns.items():
+         scores = getattr(talib, attr)(df["Open"], df["High"], df["Low"], df["Close"])
+         mask = scores != 0
+         temp_result = df[mask]
+         if len(temp_result) > 0:
+             temp_result = temp_result.assign(
+                 Open_Close_Change=(temp_result["Close"] - temp_result["Open"]) / temp_result["Open"],
+                 High_Low_Change=(temp_result["High"] - temp_result["Low"]) / temp_result["Low"],
+                 Pattern=[pattern] * len(temp_result),
+                 Pattern_Score=scores[mask].values,
+             )[result.columns]
+             result = result.append(temp_result)
+     result = result.assign(time_updated = datetime.datetime.now())
+     return result
+
+
+ def update_database():
+
+     with sqlite3.connect("dataset/ihsg.db") as con:
+         start_date = datetime.datetime.strptime(
+             pd.read_sql("SELECT MAX(Date) FROM historical", con=con).values[0][0],
+             "%Y-%m-%d %H:%M:%S"
+         )
+         start_date += pd.offsets.DateOffset(days=1)
+         start_date = datetime.datetime.strftime(start_date, "%Y-%m-%d")
+         end_date = datetime.datetime.now()
+         if (end_date.hour) < 15:
+             end_date -= pd.offsets.DateOffset(days = 1)
+         end_date = datetime.datetime.strftime(end_date, "%Y-%m-%d")
+         ihsg = (
+             yfi.download("^JKSE", start=start_date, end=end_date, progress=False)
+             .dropna()
+         )[start_date:end_date]
+         print(f"New Data IHSG {start_date}-{end_date}\n", len(ihsg), " rows\t")
+         if len(ihsg) > 0:
+             print(ihsg)
+             ihsg = (
+                 ihsg.assign(
+                     Kode="IHSG",
+                     time_updated = datetime.datetime.now(),
+                 )
+                 .reset_index()
+             )[["Date", "Kode", "Open", "High", "Low", "Close", "Volume", "time_updated"]]
+             ihsg.to_sql("historical", if_exists="append", con=con, index=False)
+             tickers = pd.read_sql(
+                 """
+                 SELECT DISTINCT Kode FROM historical
+                 WHERE Kode != "IHSG"
+                 """,
+                 con=con,
+             ).iloc[:,0].to_list()
+             print("UPDATING historical TABLE..")
+             for i in range(0, len(tickers), 50):
+                 ticker = [f"{kode}.JK" for kode in tickers[i : i + 50]]
+                 df = (
+                     yfi.download(ticker, start=start_date, end=end_date, progress=False)
+                     .T.unstack(level=1)
+                     .T.reset_index()
+                     .dropna()
+                     .rename(columns={"level_1": "Kode"})
+                 )[["Date", "Kode", "Open", "High", "Low", "Close", "Volume"]]
+                 df["Kode"] = df["Kode"].str.replace(".JK", "")
+                 df = df.assign(time_updated = datetime.datetime.now())
+                 df.to_sql("historical", if_exists="append", con=con, index=False)
+
+
+             # update patterns database
+             tickers = ["IHSG"] + tickers
+             start = time.time()
+             for i, kode in enumerate(tickers):
+                 print(f"Finding Patterns for {kode} #{i+1}\t\t time elapsed = {time.time() - start:.2f} s")
+                 try:
+                     search_result = find_patterns(df=pd.read_sql(f"""
+                         SELECT *
+                         FROM historical
+                         WHERE Kode = '{kode}'
+                         ORDER BY Date
+                         """,
+                         con=con,
+                     ))
+                     if i == 0:
+                         search_result.to_sql("patterns", if_exists="replace", con=con, index=False)
+                     else:
+                         search_result.to_sql("patterns", if_exists="append", con=con, index=False)
+                 except:
+                     pass
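
The bare except: pass at the end of update_database silently skips any ticker whose pattern search fails. A hypothetical variant of that loop, narrowing the exception and reporting what was skipped (same control flow otherwise; find_patterns is the function defined above):

import pandas as pd  # assumes the module-level imports and find_patterns from the script above

def refresh_patterns(con, tickers):
    skipped = []
    for i, kode in enumerate(tickers):
        try:
            history = pd.read_sql(
                "SELECT * FROM historical WHERE Kode = ? ORDER BY Date",
                con=con,
                params=(kode,),
            )
            find_patterns(history).to_sql(
                "patterns",
                if_exists="replace" if i == 0 else "append",
                con=con,
                index=False,
            )
        except Exception as exc:
            skipped.append(kode)
            print(f"Pattern search failed for {kode}: {exc}")
    return skipped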
ta-lib-0.4.0-src.tar.gz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ff41efcb1c011a4b4b6dfc91610b06e39b1d7973ed5d4dee55029a0ac4dc651
+ size 1330299