forked from rahul166/Python-scripts
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathhyperopt.py
More file actions
54 lines (39 loc) · 1.51 KB
/
hyperopt.py
File metadata and controls
54 lines (39 loc) · 1.51 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
'''
Hyperparameter optimization method I used for tuning the parameters of LightGBM.
It can be used for tuning the parameters of any scikit-learn compatible model.
'''
from hyperopt import hp
from hyperopt import fmin, tpe, hp, Trials
import xgboost as xgb
from sklearn.model_selection import cross_val_score,StratifiedKFold
class HyperoptMethod():
    """Hyperparameter search via hyperopt's TPE algorithm for a
    scikit-learn compatible classifier (originally used for LightGBM).

    Arguments:
        clf {estimator} -- classifier implementing the scikit-learn API
                           (must support ``set_params`` and ``fit``)
        x_train {array-like} -- training features
        y_train {array-like} -- training labels
    """
    def __init__(self, clf, x_train, y_train):
        self.classifier = clf
        self.x_train = x_train
        self.y_train = y_train

    def score(self, params):
        """Objective function evaluated by hyperopt at each trial.

        Applies the sampled ``params`` to the classifier and returns the
        NEGATIVE mean cross-validated accuracy: ``fmin`` MINIMIZES its
        objective, so returning raw accuracy (as the original code did)
        would steer the search toward the WORST parameters.

        Arguments:
            params {dict} -- one parameter sample drawn from the search space

        Returns:
            float -- negative mean accuracy over the stratified CV folds
        """
        # Bug fix: the original ignored `params` entirely, so every trial
        # evaluated the same default model and the search was a no-op.
        self.classifier.set_params(**params)
        accuracy = cross_val_score(
            self.classifier, self.x_train, self.y_train,
            scoring='accuracy', cv=StratifiedKFold(),
        ).mean()
        return -accuracy

    def optimize(self):
        """Run the TPE search over the parameter space.

        Returns:
            dict -- best parameter setting found by ``fmin`` (note that
                    ``hp.choice`` entries are reported as indices into
                    their option lists, not the option values themselves)
        """
        space = {
            # range() already yields ints; no comprehension needed.
            'n_estimators': hp.choice('n_estimators', list(range(100, 1000))),
            'max_depth': hp.choice('max_depth', list(range(1, 13))),
            'num_leaves': hp.choice('num_leaves', list(range(1, 128))),
            'colsample_bytree': hp.quniform('colsample_bytree', 0.5, 1, 0.05),
        }
        trials = Trials()
        best = fmin(self.score, space=space, algo=tpe.suggest,
                    max_evals=10, trials=trials)
        print(best)
        return best