made all model files use the config file, added defaults to said config file
atloo1 committed Jul 22, 2016
1 parent 98a1448 commit 8fb5cde
Showing 5 changed files with 93 additions and 15 deletions.
14 changes: 11 additions & 3 deletions adaboost_model.py
@@ -1,9 +1,17 @@
 __author__ = 'hao yuan'
 from sklearn.ensemble import AdaBoostRegressor
 from sklearn.tree import DecisionTreeRegressor
+import configuration_parser


 def get():
-    model = AdaBoostRegressor(DecisionTreeRegressor(max_depth=12),
-                              n_estimators=275)
-    return model
+    config = configuration_parser.parse()
+    estimators = config.getint(__name__, 'estimators')
+    lr = config.getfloat(__name__, 'learning rate')
+    loss = config.get(__name__, 'loss function')
+    exec('max_depth=' + config.get(__name__, 'max_depth'), locals(), globals())
+    min_samples_split = config.getint(__name__, 'min_samples_split')
+    min_samples_leaf = config.getint(__name__, 'min_samples_leaf')
+    return AdaBoostRegressor(DecisionTreeRegressor(max_depth=max_depth, min_samples_split=min_samples_split,
+                                                   min_samples_leaf=min_samples_leaf),
+                             n_estimators=estimators, loss=loss, learning_rate=lr)
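
Note: configuration_parser itself is not part of this commit, so the object returned by configuration_parser.parse() is an assumption; the getint/getfloat/get calls keyed by __name__ (here 'adaboost_model') suggest a standard-library configparser.ConfigParser loaded from default.conf. A minimal sketch under that assumption (the parse() signature and the ExtendedInterpolation choice are guesses based on the ${default:test_cases} syntax used in default.conf):

# Hypothetical stand-in for configuration_parser.parse(); not part of this commit.
import configparser

def parse(path='default.conf'):
    # ExtendedInterpolation assumed because default.conf uses ${default:test_cases}.
    config = configparser.ConfigParser(interpolation=configparser.ExtendedInterpolation())
    config.read(path)
    return config

# With the [adaboost_model] defaults below, adaboost_model.get() would resolve to roughly:
# AdaBoostRegressor(DecisionTreeRegressor(max_depth=12, min_samples_split=2, min_samples_leaf=1),
#                   n_estimators=275, loss='linear', learning_rate=1.0)
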
27 changes: 27 additions & 0 deletions default.conf
@@ -25,13 +25,40 @@ test_cases = ${default:test_cases}
 [LeaveOutAlloyCV]
 save_path = ../DBTT/graphs/leaveoutAlloy/{}.png

+[dtr_model]
+max_depth = 5
+min_samples_split = 2
+min_samples_leaf = 1
+split criterion = mse
+
 [gkrr_model]
 alpha = 0.00139
 coef0 = 1
 degree = 3
 gamma = 0.518
 kernel = rbf

+[lkrr_model]
+alpha = 0.00518
+gamma = 0.518
+kernel = laplacian
+
+[randomforest_model]
+estimators = 100
+max_depth = 5
+min_samples_split = 2
+min_samples_leaf = 1
+max_leaf_nodes = None
+jobs = 1
+
+[adaboost_model]
+estimators = 275
+max_depth = 12
+min_samples_split = 2
+min_samples_leaf = 1
+learning rate = 1
+loss function = linear
+
 #minmax, size, transfer_function are the verbatim arguments for neurolab.net.newff()
 #training_algorithm is the verbatim 'support train fcn' for neurolab.train omitting 'train_'
 #see: https://pythonhosted.org/neurolab/lib.html#module-neurolab.net
11 changes: 11 additions & 0 deletions dtr_model.py
@@ -0,0 +1,11 @@
+import configuration_parser
+import sklearn.tree as tree
+
+def get():
+    config = configuration_parser.parse()
+    exec('max_depth=' + config.get(__name__, 'max_depth'), locals(), globals())
+    min_samples_split = config.getint(__name__, 'min_samples_split')
+    min_samples_leaf = config.getint(__name__, 'min_samples_leaf')
+    criterion = config.get(__name__, 'split criterion')
+    return tree.DecisionTreeRegressor(criterion=criterion, max_depth=max_depth, min_samples_leaf=min_samples_leaf,
+                                      min_samples_split=min_samples_split)
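
How these get() factories are consumed is not shown in this commit; presumably a caller imports the model module and calls get() with no arguments, with the hyperparameters coming from default.conf. A hedged usage sketch with placeholder data:

import numpy as np
import dtr_model

X = np.random.rand(20, 3)  # placeholder feature matrix
y = np.random.rand(20)     # placeholder target values

model = dtr_model.get()    # hyperparameters come from the [dtr_model] section of default.conf
model.fit(X, y)
print(model.predict(X[:5]))
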
8 changes: 6 additions & 2 deletions lkrr_model.py
@@ -1,8 +1,12 @@
 from sklearn.kernel_ridge import KernelRidge
+import configuration_parser

 __author__ = 'haotian'


 def get():
-
-    return KernelRidge(alpha= .00518, gamma = .518, kernel='laplacian')
+    config = configuration_parser.parse()
+    alpha = config.getfloat(__name__, 'alpha')
+    gamma = config.getfloat(__name__, 'gamma')
+    kernel = config.get(__name__, 'kernel')
+    return KernelRidge(alpha=alpha, gamma=gamma, kernel=kernel)
48 changes: 38 additions & 10 deletions randomforest_model.py
@@ -1,15 +1,43 @@
 from sklearn.ensemble import RandomForestRegressor
-
+import configuration_parser
+import ast
 __author__ = 'hao yuan'


 def get():
-    model = RandomForestRegressor(n_estimators=100,
-                                  max_features='auto',
-                                  max_depth=5,
-                                  min_samples_split=2,
-                                  min_samples_leaf=1,
-                                  min_weight_fraction_leaf=0,
-                                  max_leaf_nodes=None,
-                                  n_jobs=1)
-    return model
+    config = configuration_parser.parse()
+    estimators = config.getint(__name__, 'estimators')
+    exec('max_depth = ' + config.get(__name__, 'max_depth'), locals(), globals())
+    min_samples_split = config.getint(__name__, 'min_samples_split')
+    min_samples_leaf = config.getint(__name__, 'min_samples_leaf')
+    exec('max_leaf_nodes=' + config.get(__name__, 'max_leaf_nodes'), locals(), globals())
+    jobs = config.getint(__name__, 'jobs')
+
+    model = RandomForestRegressor(n_estimators=estimators,
+                                  max_depth=max_depth,
+                                  min_samples_split=min_samples_split,
+                                  min_samples_leaf=min_samples_leaf,
+                                  max_leaf_nodes=max_leaf_nodes,
+                                  n_jobs=jobs)
+    return model
+
+def __executeStringNoneOrNumber__(readFromConfig, varAssignment):
+    try:
+        int(readFromConfig)
+        return varAssignment + '=' + readFromConfig  # a number
+    except:
+        try:
+            float(readFromConfig)
+            return varAssignment + '=' + readFromConfig  # a number
+        except:
+            try:
+                if exec(readFromConfig) == None: return varAssignment + '=' + readFromConfig  # None
+            except:
+                return varAssignment + '= \' ' + readFromConfig + '\''  # string
+
+def __executeStringOrInt__(readFromConfig, varAssignment):
+    try:
+        int(readFromConfig)
+        return varAssignment + '=' + readFromConfig  # a number
+    except:
+        return varAssignment + '= \' ' + readFromConfig + '\''  # string
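
Note: the __executeStringNoneOrNumber__ and __executeStringOrInt__ helpers build assignment strings for exec() so that values such as max_leaf_nodes = None can become None, numbers, or quoted strings, though neither helper is called in the hunk shown and the ast import goes unused. A hedged alternative sketch, not part of this commit, that reads an int-or-None option without exec by using ast.literal_eval (_int_or_none is a hypothetical name):

import ast

def _int_or_none(config, section, option):
    # Hypothetical helper: 'None' in default.conf -> None, '5' -> 5; anything else raises.
    raw = config.get(section, option)
    value = ast.literal_eval(raw)
    if value is not None and not isinstance(value, int):
        raise ValueError('%s.%s should be an int or None, got %r' % (section, option, raw))
    return value

# e.g. max_leaf_nodes = _int_or_none(config, 'randomforest_model', 'max_leaf_nodes')
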
