-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmake_pdps.py
More file actions
43 lines (33 loc) · 1.79 KB
/
make_pdps.py
File metadata and controls
43 lines (33 loc) · 1.79 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
from matplotlib import pyplot as plt
from pdpbox import pdp
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import KFold
from data import *
def run(model, target, logfile):
    """Train `model` on `target` under 3-fold cross-validation, log L1 errors,
    and save partial-dependence plots for the 10 most important features.

    Parameters
    ----------
    model : fitted-API regressor exposing ``fit``/``predict`` and
        ``feature_importances_`` (e.g. GradientBoostingRegressor).
    target : str — name of the target column in the dataframe from get_data().
    logfile : writable text file object that per-fold metrics are printed to.

    Side effects: writes PNG files under ./output/pdp/ (directory is assumed
    to exist — TODO confirm) and prints to `logfile`.
    """
    print('Using model: {}'.format(model), file=logfile)
    df = get_data(for_training=True)
    kfolds = get_folds(df, 3)
    for f_ind, fold in enumerate(kfolds):  # each fold serves once as the test set
        # Train on the concatenation of all other folds, test on this one.
        training, testing = preprocess(
            pd.concat(kfolds[:f_ind] + kfolds[f_ind + 1:]), fold)
        X_train, y_train = training.drop(target, axis=1), training[target]
        X_test, y_test = testing.drop(target, axis=1), testing[target]
        # 'site' is dropped from the features — presumably an identifier
        # column, not a predictor (verify against data.get_data).
        X_train, X_test = X_train.drop('site', axis=1), X_test.drop('site', axis=1)
        model.fit(X_train, y_train)
        pred_train = model.predict(X_train)
        pred_test = model.predict(X_test)
        # Mean absolute error (L1) on train and test predictions.
        L1_tr = np.average(np.abs(np.subtract(pred_train, y_train.values.ravel())))
        L1_ts = np.average(np.abs(np.subtract(pred_test, y_test.values.ravel())))
        print('fold {}: trL1: {} tsL1: {}'.format(
            f_ind + 1, round(L1_tr, 3), round(L1_ts, 3)), file=logfile)
        imp = model.feature_importances_
        inds = np.argsort(imp)[::-1]  # feature indices, most important first
        for ind in inds[:10]:
            feat = X_train.columns.values[ind]  # hoisted: used three times below
            pdp_goals = pdp.pdp_isolate(model=model, dataset=X_test,
                                        model_features=X_train.columns.values,
                                        feature=feat)
            f, ax = pdp.pdp_plot(pdp_goals, feat)
            plt.savefig('./output/pdp/{}_{}_{}.png'.format(target, feat, str(f_ind + 1)))
            # Fix: close the figure after saving. pdp_plot creates a new figure
            # each iteration (30 total here); without close() they accumulate
            # in memory and matplotlib warns once more than 20 are open.
            plt.close(f)
if __name__ == '__main__':
    # Script entry point: small gradient-boosted ensemble predicting the
    # 'retentionP' target; per-fold metrics go to a zlog text file.
    target = 'retentionP'
    regressor = GradientBoostingRegressor(n_estimators=20)
    log_path = './output/pdp/{}_zlog.txt'.format(target)
    with open(log_path, 'w') as log:
        run(regressor, target, log)