xgb.conf
# General Parameters, see comment for each definition
# can be gbtree or gblinear
booster = gbtree
# squared error loss for regression (squared log error is a commented-out alternative)
objective = reg:squarederror
#objective = reg:squaredlogerror
# Tree Booster Parameters
# step size shrinkage
eta = 0.5
# minimum loss reduction required to make a further partition
gamma = 1.0
# minimum sum of instance weight(hessian) needed in a child
min_child_weight = 1
# maximum depth of a tree
max_depth = 10
# Task Parameters
# the number of boosting rounds
num_round = 100
# period for saving model snapshots: 0 saves only the final-round model, 1 saves after every round
save_period = 1
# The path of the training data; the "#dtrain1.cache" suffix enables external-memory caching
data = "numerical/train.txt#dtrain1.cache"
# Evaluation sets used to monitor the training process; the name in brackets labels each set
eval[train] = "numerical/train.txt#dtrain1.cache"
eval[test] = "numerical/test.txt#dtest1.cache"
# Data paths used by the train and test tasks
train:data = "numerical/train.txt#dtrain1.cache"
test:data = "numerical/test.txt#dtest1.cache"
# Directory where model snapshots are saved
model_dir = "numerical"
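
# Usage (a minimal sketch, not part of the original config): assuming the XGBoost CLI
# binary has been built as ./xgboost and is run from the directory containing this
# file and the numerical/ data folder, training can be started with:
#
#   ./xgboost xgb.conf
#
# Any parameter in this file can also be overridden on the command line, e.g.:
#
#   ./xgboost xgb.conf max_depth=6 num_round=200
#
# After training, a saved snapshot from model_dir (typically named by the
# zero-padded round number, e.g. numerical/0100.model here) can be used for
# prediction with the pred task:
#
#   ./xgboost xgb.conf task=pred model_in=numerical/0100.model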