MLCryptoForecaster / MLCryptoForecasterAllAssetsTPSL.py
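"""Forecast the 4-hour trend for every USDT-quoted Binance pair.

For each symbol the script downloads (or incrementally updates) 4h klines,
computes a set of technical indicators, trains a RandomForestClassifier on
Ichimoku-cloud-based labels, predicts the latest trend, grid-searches
take-profit / stop-loss levels for the historical signals, and logs the
results to predictions_results.txt.

Requires: python-binance, pandas, numpy, scikit-learn, ta.
"""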
import os
import pandas as pd
import numpy as np
from datetime import timedelta
from binance.client import Client
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import classification_report
import ta
# Function to log results to both console and file
# Does not insert blank lines; blank lines added explicitly after each asset block
def log_results(message, filename="predictions_results.txt"):
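    """Print a message and append it as one line to the results file."""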
    print(message)
    with open(filename, "a") as f:
        f.write(message + "\n")
# Initialize Binance client
client = Client()
# Settings
interval = Client.KLINE_INTERVAL_4HOUR
result_file = "predictions_results.txt"
# Delete the results file if it exists for a fresh start
if os.path.exists(result_file):
    os.remove(result_file)
# Initialize result file header
with open(result_file, "w") as f:
f.write("Asset,Time,Price,Prediction,Optimal_UP_TP,Optimal_UP_SL,Optimal_DN_TP,Optimal_DN_SL\n")
# Get USDT-quoted trading symbols
symbols = [s['symbol'] for s in client.get_exchange_info()['symbols']
           if s['status'] == 'TRADING' and s['quoteAsset'] == 'USDT']
# Optimize take-profit / stop-loss function
def optimize_tp_sl(df, signals, side, pgrid, lgrid):
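    """Grid-search take-profit/stop-loss levels for bars where signals == side.

    Each signal bar is simulated over the following 10 bars: the trade closes
    on the first bar whose return reaches +tp (take profit) or -sl (stop loss),
    with the booked return capped at max(tp, sl). side=1 evaluates longs,
    side=0 evaluates shorts. Returns the (tp, sl, avg_return) grid point with
    the highest average simulated return.
    """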
    best = (0, 0, -np.inf)
    prices = df['close'].values
    idxs = np.where(signals == side)[0]
    for tp in pgrid:
        for sl in lgrid:
            rets = []
            for i in idxs:
                entry = prices[i]
                for j in range(i + 1, min(i + 11, len(prices))):
                    ret = (prices[j] - entry) / entry if side == 1 else (entry - prices[j]) / entry
                    if ret >= tp or ret <= -sl:
                        rets.append(np.sign(ret) * min(abs(ret), max(tp, sl)))
                        break
            if rets:
                avg_ret = np.mean(rets)
                if avg_ret > best[2]:
                    best = (tp, sl, avg_ret)
    return best
# Process each symbol
for symbol in symbols:
log_results(f"=== {symbol} ===", result_file)
# Load or download historical data
data_file = f"{symbol}_data_4h_full.csv"
if os.path.exists(data_file):
df = pd.read_csv(data_file, index_col=0, parse_dates=True)
last_ts = df.index[-1]
start = (last_ts + timedelta(hours=4)).strftime("%d %B %Y %H:%M:%S")
new = client.get_historical_klines(symbol, interval, start)
if new:
new_df = pd.DataFrame(new, columns=['timestamp','open','high','low','close','volume',
'close_time','quote_av','trades','tb_base_av','tb_quote_av','ignore'])
new_df = new_df[['timestamp','open','high','low','close','volume']].astype(float)
new_df['timestamp'] = pd.to_datetime(new_df['timestamp'], unit='ms')
new_df.set_index('timestamp', inplace=True)
            # Re-downloaded candles can overlap the last stored one; keep the newest values
            df = pd.concat([df, new_df])
            df = df[~df.index.duplicated(keep='last')]
            df.to_csv(data_file)
    else:
        klines = client.get_historical_klines(symbol, interval, "01 December 2021")
        df = pd.DataFrame(klines, columns=['timestamp','open','high','low','close','volume',
                                           'close_time','quote_av','trades','tb_base_av','tb_quote_av','ignore'])
        df = df[['timestamp','open','high','low','close','volume']].astype(float)
        df['timestamp'] = pd.to_datetime(df['timestamp'], unit='ms')
        df.set_index('timestamp', inplace=True)
        df.to_csv(data_file)
    # Compute technical indicators
    df['rsi'] = ta.momentum.RSIIndicator(df['close'], window=14).rsi()
    df['macd'] = ta.trend.MACD(df['close']).macd()
    for s in [10, 20, 50, 100]:
        df[f'ema_{s}'] = df['close'].ewm(span=s).mean()
    for w in [10, 20, 50, 100]:
        df[f'sma_{w}'] = df['close'].rolling(window=w).mean()
    bb = ta.volatility.BollingerBands(df['close'], window=20, window_dev=2)
    df['bbw'] = (bb.bollinger_hband() - bb.bollinger_lband()) / bb.bollinger_mavg()
    df['atr'] = ta.volatility.AverageTrueRange(df['high'], df['low'], df['close'], window=14).average_true_range()
    df['adx'] = ta.trend.ADXIndicator(df['high'], df['low'], df['close'], window=14).adx()
    st = ta.momentum.StochasticOscillator(df['high'], df['low'], df['close'], window=14)
    df['st_k'] = st.stoch()
    df['st_d'] = st.stoch_signal()
    df['wr'] = ta.momentum.WilliamsRIndicator(df['high'], df['low'], df['close'], lbp=14).williams_r()
    df['cci'] = ta.trend.CCIIndicator(df['high'], df['low'], df['close'], window=20).cci()
    df['mom'] = df['close'] - df['close'].shift(10)
    ichi = ta.trend.IchimokuIndicator(df['high'], df['low'], window1=9, window2=26, window3=52)
    df['span_a'] = ichi.ichimoku_a()
    df['span_b'] = ichi.ichimoku_b()
    df.dropna(inplace=True)
    # Label signals from the Ichimoku cloud: 1 = close above the cloud (uptrend),
    # 0 = close below the cloud (downtrend), -1 = inside the cloud (neutral)
    df['signal'] = np.select([
        (df['close'] > df['span_a']) & (df['close'] > df['span_b']),
        (df['close'] < df['span_a']) & (df['close'] < df['span_b'])
    ], [1, 0], default=-1)
    # Train/test split (shuffle=False keeps the chronological order)
    features = [c for c in df.columns if c not in ['open', 'high', 'low', 'close', 'volume', 'signal']]
    X, y = df[features], df['signal']
    Xtr, Xte, ytr, yte = train_test_split(X, y, test_size=0.2, shuffle=False)
    model = RandomForestClassifier(n_estimators=200, class_weight='balanced', random_state=42)
    model.fit(Xtr, ytr)
    ypr = model.predict(Xte)
    # Log classification report
    report = classification_report(yte, ypr, zero_division=0)
    log_results(f"Classification report for {symbol}: {report}", result_file)
    # Predict latest trend and log time & price
    latest_df = X.iloc[-1:]  # DataFrame for prediction
    trend_label = model.predict(latest_df)[0]
    pred_time = df.index[-1]
    pred_price = df['close'].iloc[-1]
    trend_str = {1: 'Uptrend', 0: 'Downtrend', -1: 'Neutral'}[trend_label]
    log_results(f"Time: {pred_time}, Price: {pred_price:.2f}, Prediction: {trend_str}", result_file)
    # Optimize TP/SL over a 1%-9% grid and log results
    hist_sign = model.predict(X)  # Pass DataFrame to avoid warning
    pgrid = np.arange(0.01, 0.1, 0.01)
    lgrid = np.arange(0.01, 0.1, 0.01)
    up_tp, up_sl, _ = optimize_tp_sl(df, hist_sign, 1, pgrid, lgrid)
    dn_tp, dn_sl, _ = optimize_tp_sl(df, hist_sign, 0, pgrid, lgrid)
    log_results(f"Optimal UP TP/SL: +{up_tp*100:.1f}% / -{up_sl*100:.1f}%", result_file)
    log_results(f"Optimal DN TP/SL: +{dn_tp*100:.1f}% / -{dn_sl*100:.1f}%", result_file)
    # Add a blank line after each asset block
    with open(result_file, "a") as f:
        f.write("\n")
# End of processing
log_results("All assets processed.", result_file)