Kolpitor committed
Commit 675fe68 · Parent: 5fa8ed1

Update app.py

Files changed (1)
  1. app.py +48 -3
app.py CHANGED
@@ -10,6 +10,9 @@ os.system('pip install graphviz')
 os.system('pip install python-pydot')
 os.system('pip install python-pydot-ng')
 os.system('pip install -U scikit-learn scipy matplotlib')
+os.system('pip install wandb --upgrade')
+os.system('pip install tensorboardX --upgrade')
+os.system('wandb login 5a0e81f39777351977ce52cf57ea09c4f48f3d93 --relogin')
 
 from collections import namedtuple
 import altair as alt
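Note on the wandb login line in this hunk: it commits a live API key to the app source. A minimal sketch of an alternative, assuming the key is supplied through the WANDB_API_KEY environment variable (for example a repository secret) instead of being hardcoded:

    import wandb

    # Assumes WANDB_API_KEY is already set in the environment before the app
    # starts; wandb.login() picks it up, so no key has to live in app.py.
    wandb.login()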
 
@@ -22,6 +25,14 @@ import graphviz
 from sklearn.metrics import mean_squared_error
 from sklearn.model_selection import train_test_split
 import matplotlib.pyplot
+%load_ext tensorboard
+import os
+import datetime
+from tensorboardX import SummaryWriter
+import wandb
+from wandb.xgboost import wandb_callback
+
+wandb.init(project="australian_rain", entity="epitech1")
 
 """
 # MLOPS
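Two remarks on this hunk: %load_ext tensorboard is an IPython magic and is not valid syntax in a plain app.py run by Streamlit (TensorBoard is started from the command line further down anyway), and wandb.init() opens the run without recording its configuration. A minimal sketch of the latter, assuming the *_input values that the Streamlit controls in app.py already feed into XGBRegressor below:

    import wandb

    run = wandb.init(project="australian_rain", entity="epitech1")
    # Assumed names: these *_input values come from the existing Streamlit
    # widgets used by the XGBRegressor call later in app.py.
    run.config.update({
        "colsample_bytree": colsample_bytree_input,
        "learning_rate": learning_rate_input,
        "max_depth": max_depth_input,
        "alpha": alpha_input,
        "n_estimators": n_estimators_input,
    })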
 
@@ -100,9 +111,36 @@ data_dmatrix = xgboost.DMatrix(data=x,label=y)
 
 X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=123)
 
-xg_reg = xgboost.XGBRegressor(colsample_bytree = colsample_bytree_input, learning_rate = learning_rate_input, max_depth = max_depth_input, alpha = alpha_input, n_estimators = n_estimators_input)
-
-xg_reg.fit(X_train,y_train)
+class TensorBoardCallback(xgboost.callback.TrainingCallback):
+    def __init__(self, experiment: str = None, data_name: str = None):
+        self.experiment = experiment or "logs"
+        self.data_name = data_name or "test"
+        self.datetime_ = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
+        self.log_dir = f"runs/{self.experiment}/{self.datetime_}"
+        self.train_writer = SummaryWriter(log_dir=os.path.join(self.log_dir, "train/"))
+        if self.data_name:
+            self.test_writer = SummaryWriter(log_dir=os.path.join(self.log_dir, f"{self.data_name}/"))
+
+    def after_iteration(
+        self, model, epoch: int, evals_log: xgboost.callback.TrainingCallback.EvalsLog
+    ) -> bool:
+        if not evals_log:
+            return False
+
+        for data, metric in evals_log.items():
+            for metric_name, log in metric.items():
+                score = log[-1][0] if isinstance(log[-1], tuple) else log[-1]
+                if data == "train":
+                    self.train_writer.add_scalar(metric_name, score, epoch)
+                else:
+                    self.test_writer.add_scalar(metric_name, score, epoch)
+
+        return False
+
+xg_reg = xgboost.XGBRegressor(colsample_bytree = colsample_bytree_input, learning_rate = learning_rate_input, max_depth = max_depth_input, alpha = alpha_input, n_estimators = n_estimators_input, eval_metric = ['rmse', 'error', 'logloss', 'map'],
+                              callbacks=[TensorBoardCallback(experiment='exp_1', data_name='test')])
+
+xg_reg.fit(X_train,y_train, eval_set=[(X_train, y_train)])
 
 preds = xg_reg.predict(X_test)
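The TensorBoardCallback in this hunk never flushes or closes its SummaryWriter instances, so the last scalars can stay buffered. A minimal sketch of a possible addition, using the after_training hook that xgboost.callback.TrainingCallback also defines (it must return the model it receives):

    class TensorBoardCallback(xgboost.callback.TrainingCallback):
        # ... __init__ and after_iteration as in the hunk above ...

        def after_training(self, model):
            # Closing the writers flushes any buffered scalars to the event files.
            self.train_writer.close()
            if self.data_name:
                self.test_writer.close()
            return model  # xgboost requires after_training to return the model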
 
 
@@ -119,6 +157,8 @@ st.write((cv_results["test-rmse-mean"]).tail(1))
 
 xg_reg = xgboost.train(params=params, dtrain=data_dmatrix, num_boost_round=10)
 
+os.system('tensorboard --logdir runs')
+
 #xgboost.plot_tree(xg_reg,num_trees=0)
 #matplotlib.pyplot.rcParams['figure.figsize'] = [200, 200]
 #matplotlib.pyplot.show()
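os.system('tensorboard --logdir runs') blocks until TensorBoard exits, which would stall the rest of the Streamlit script. A minimal non-blocking sketch, assuming the tensorboard executable is on PATH and the default port 6006 is free:

    import subprocess

    # Start TensorBoard as a child process so the app keeps running; the
    # handle can be kept around to terminate the process later if needed.
    tb_process = subprocess.Popen(["tensorboard", "--logdir", "runs", "--port", "6006"])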
 
@@ -126,3 +166,8 @@ xg_reg = xgboost.train(params=params, dtrain=data_dmatrix, num_boost_round=10)
 #xgboost.plot_importance(xg_reg)
 #matplotlib.pyplot.rcParams['figure.figsize'] = [5, 5]
 #matplotlib.pyplot.show()
+
+#xg_reg = xgboost.train(params=params, dtrain=data_dmatrix, num_boost_round=10, callbacks=[wandb_callback()])
+
+# MLOPS - W&B analytics
+# added the wandb to the callbacks
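The W&B callback itself is left commented out in this last hunk. A minimal sketch of what enabling it could look like, assuming the wandb.xgboost.wandb_callback import from this commit still matches the installed wandb version (newer releases expose the same idea as wandb.integration.xgboost.WandbCallback) and reusing the params and data_dmatrix objects already built in app.py:

    import xgboost
    import wandb
    from wandb.xgboost import wandb_callback

    # An eval set gives the callback metrics to forward to the run opened by
    # wandb.init() near the top of the file.
    xg_reg = xgboost.train(
        params=params,
        dtrain=data_dmatrix,
        num_boost_round=10,
        evals=[(data_dmatrix, "train")],
        callbacks=[wandb_callback()],
    )
    wandb.finish()  # close the run once training and logging are done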