-
Notifications
You must be signed in to change notification settings - Fork 0
/
best_gradient_boosting.py
58 lines (43 loc) · 1.74 KB
/
best_gradient_boosting.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import r2_score
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
def plot_results(X, y, gs):
    """Scatter plot of true vs. predicted targets for a fitted estimator.

    Args:
        X: Feature matrix to predict on.
        y: True target values (plotted on the x-axis).
        gs: Fitted estimator/search object exposing ``predict``.

    Returns:
        0 on completion (kept for backward compatibility with callers).
    """
    predictions = gs.predict(X)
    plt.scatter(y, predictions, color='blue')
    # Diagonal y == y line marks where perfect predictions would fall.
    plt.plot(y, y, color='red')
    plt.title('Gradient Boosting')
    plt.xlabel('True Y')
    plt.ylabel('Predicted Y')
    plt.show()
    return 0
def best_model(X_train, X_test, y_train, y_test, plot_ind, eval_parm):
    """Grid-search a GradientBoostingRegressor and score it on the test set.

    Args:
        X_train, X_test: Feature matrices of the pre-made train/test split.
        y_train, y_test: Target vectors of the pre-made train/test split.
        plot_ind: If truthy, show a predicted-vs-true scatter plot over the
            full (train + test) data after fitting.
        eval_parm: Grid selector — 'deep' for the exhaustive parameter grid,
            'test' for a small quick-run grid.

    Returns:
        dict with keys:
            'trained_model': the best estimator found by the grid search,
            'score': R^2 of that estimator on the held-out test set.

    Raises:
        ValueError: if ``eval_parm`` is neither 'deep' nor 'test'.
    """
    # Combined data is used ONLY for the optional diagnostic plot below,
    # never for fitting.
    X = np.concatenate((X_train, X_test), axis=0)
    y = np.concatenate((y_train, y_test), axis=0)
    if eval_parm == 'deep':
        parameters = {'n_estimators': (50, 100),
                      'loss': ('ls', 'lad', 'huber'),
                      'criterion': ('friedman_mse', 'mse', 'mae'),
                      'max_depth': (3, 4, 5),
                      'min_samples_split': (2, 3, 4),
                      'min_samples_leaf': (2, 3),
                      'max_features': ('auto', 'log2', None)
                      }
    elif eval_parm == 'test':
        parameters = {'n_estimators': (50, 100),
                      'loss': ('ls', 'lad'),
                      'min_samples_leaf': (2, 3),
                      'max_features': ('auto', 'log2', None)
                      }
    else:
        # Previously an unrecognized value fell through both branches and
        # crashed later with a NameError on `parameters`; fail fast instead.
        raise ValueError(
            "eval_parm must be 'deep' or 'test', got %r" % (eval_parm,))
    regressor = GradientBoostingRegressor(random_state=0)
    gs = GridSearchCV(regressor, parameters, cv=5)
    # BUG FIX: the original fit the grid search on train+test concatenated,
    # leaking the test set into model selection and inflating the reported
    # R^2. Fit on the training split only; the test split stays held out.
    gs.fit(X_train, y_train)
    # Evaluate the selected model on the genuinely unseen test set.
    y_pred = gs.predict(X_test)
    score = r2_score(y_test, y_pred)
    return_parm = {'trained_model': gs.best_estimator_, 'score': score}
    if plot_ind:
        plot_results(X, y, gs)
    return return_parm