其他分享
首页 > 其他分享 > LightGBM与评分卡

LightGBM与评分卡

作者:互联网

调参策略

最大化 off_ks + 0.8(off_ks-train_ks)

import pandas as pd
from sklearn.metrics import roc_auc_score,roc_curve,auc
from sklearn.model_selection import train_test_split
from sklearn import metrics
from sklearn.linear_model import LogisticRegression
import numpy as np
import random
import math
import time
import lightgbm as lgb
# Load the A-card (application scorecard) modelling dataset
data = pd.read_csv('Acard.txt')
data.head()
obs_mth bad_ind uid td_score jxl_score mj_score rh_score zzc_score zcx_score person_info finance_info credit_info act_info
0 2018-10-31 0.0 A10000005 0.675349 0.144072 0.186899 0.483640 0.928328 0.369644 -0.322581 0.023810 0.00 0.217949
1 2018-07-31 0.0 A1000002 0.825269 0.398688 0.139396 0.843725 0.605194 0.406122 -0.128677 0.023810 0.00 0.423077
2 2018-09-30 0.0 A1000011 0.315406 0.629745 0.535854 0.197392 0.614416 0.320731 0.062660 0.023810 0.10 0.448718
3 2018-07-31 0.0 A10000481 0.002386 0.609360 0.366081 0.342243 0.870006 0.288692 0.078853 0.071429 0.05 0.179487
4 2018-07-31 0.0 A1000069 0.406310 0.405352 0.783015 0.563953 0.715454 0.512554 -0.261014 0.023810 0.00 0.423077
# Overall sample size and column count
data.shape
(95806, 13)
# Inspect the month distribution; the last month will serve as the
# out-of-time (cross-time) validation set
data.obs_mth.unique()
array(['2018-10-31', '2018-07-31', '2018-09-30', '2018-06-30',
       '2018-11-30'], dtype=object)
# Split into training set and out-of-time validation set
# (the last month, 2018-11-30, is held out for cross-time validation)
df_train = data[data.obs_mth != '2018-11-30'].reset_index().copy()
val = data[data.obs_mth == '2018-11-30'].reset_index().copy()
# Full feature list: *_info columns are outputs of an in-house unsupervised
# system describing personal behaviour; *_score columns are paid external
# credit-bureau scores
lst = ['person_info','finance_info','credit_info','act_info','td_score','jxl_score','mj_score','rh_score']
# Sort by observation month, newest first
df_train = df_train.sort_values(by = 'obs_mth',ascending = False)
df_train.head()
index obs_mth bad_ind uid td_score jxl_score mj_score rh_score zzc_score zcx_score person_info finance_info credit_info act_info
0 0 2018-10-31 0.0 A10000005 0.675349 0.144072 0.186899 0.483640 0.928328 0.369644 -0.322581 0.023810 0.00 0.217949
33407 33407 2018-10-31 0.0 A2810176 0.146055 0.079922 0.250568 0.045240 0.766906 0.413713 0.013863 0.023810 0.00 0.269231
33383 33383 2018-10-31 0.0 A2807687 0.551366 0.300781 0.225007 0.045447 0.735733 0.684182 -0.261014 0.071429 0.03 0.269231
33379 33379 2018-10-31 0.0 A2807232 0.708547 0.769513 0.928457 0.739716 0.947453 0.361551 -0.128677 0.047619 0.00 0.269231
33376 33376 2018-10-31 0.0 A2806932 0.482248 0.116658 0.286273 0.056618 0.047024 0.890433 0.078853 0.047619 0.00 0.269231
# Assign each row to one of five equal-sized, time-ordered folds.
# df_train is already sorted by obs_mth descending (L41), so the fold number
# reflects recency: fold 1 = newest ~20% of rows ... fold 5 = oldest ~20%.
#
# The original built the percentile rank with an explicit Python loop and an
# if/elif threshold chain (and re-sorted a second time, redundantly). The
# vectorized form below is equivalent: rank_pct takes the values i/n for
# i = 1..n, and ceil(rank_pct * 5) maps (0, 0.2] -> 1, (0.2, 0.4] -> 2,
# ..., (0.8, 1] -> 5, matching the old thresholds exactly.
n_rows = len(df_train)
rank_pct = np.arange(1, n_rows + 1) / n_rows
df_train['rank'] = np.ceil(rank_pct * 5).astype(int)
df_train.head()
index obs_mth bad_ind uid td_score jxl_score mj_score rh_score zzc_score zcx_score person_info finance_info credit_info act_info rank
0 0 2018-10-31 0.0 A10000005 0.675349 0.144072 0.186899 0.483640 0.928328 0.369644 -0.322581 0.023810 0.00 0.217949 1
33272 33272 2018-10-31 0.0 A2798022 0.448084 0.748679 0.328625 0.132682 0.167770 0.727771 -0.322581 0.023810 0.00 0.269231 1
33253 33253 2018-10-31 0.0 A2796636 0.129880 0.158756 0.240867 0.828666 0.232677 0.049676 -0.261014 0.071429 0.44 0.282051 1
32983 32983 2018-10-31 0.0 A2773761 0.644977 0.562769 0.751521 0.482639 0.486205 0.844002 0.078853 0.095238 0.03 0.282051 1
33369 33369 2018-10-31 0.0 A2806416 0.698203 0.164079 0.634027 0.013182 0.883847 0.174621 -0.322581 0.071429 0.06 0.269231 1
# Verify the five time-ordered folds are (almost) equally sized
df_train['rank'].groupby(df_train['rank']).count()
rank
1    15966
2    15966
3    15966
4    15966
5    15967
Name: rank, dtype: int64

# WOE-binning loop. `cb` is an external binning helper and `col_lst` the list
# of columns to bin — both defined elsewhere (not visible in this chunk).
# NOTE(review): the published snippet had lost all of its indentation (it
# would not parse as pasted); the structure is restored here.
bin_record = pd.DataFrame()
for col in col_lst:
    # Equal-frequency binning of `col` against the target column 'label'
    cb.pct_bin(data, col, 'label')
    cb.plot_woe()
    # Replace the raw values with their bin WOE
    data[col] = cb.trans_to_woe(data[col])
    # Accumulate per-column bin statistics
    rcd = cb.get_bin_stats()
    if bin_record.empty:
        bin_record = rcd
    else:
        bin_record = bin_record.append(rcd)
bin_record.head(8)

# Number of rows in the training window (all months except 2018-11-30)
len(df_train)
79831
# Define the LightGBM training helper
def LGB_test(train_x, train_y, test_x, test_y):
    """Fit a small LightGBM binary classifier and report its validation AUC.

    Trains on (train_x, train_y) with early stopping monitored on
    (test_x, test_y); returns the fitted classifier and the best
    validation-set AUC.
    """
    from multiprocessing import cpu_count
    clf = lgb.LGBMClassifier(
        boosting_type='gbdt',
        num_leaves=31,
        reg_alpha=0.0,
        reg_lambda=1,
        max_depth=2,          # shallow trees to limit overfitting
        n_estimators=800,     # upper bound; early stopping picks the best round
        objective='binary',
        subsample=0.7,
        colsample_bytree=0.7,
        subsample_freq=1,
        learning_rate=0.05,
        min_child_weight=50,
        random_state=None,
        n_jobs=cpu_count() - 1,
        # NOTE: the original also passed max_features=140 (a scikit-learn
        # parameter that LightGBM does not use) and num_iterations=800 (an
        # alias of n_estimators with the same value, which triggered the
        # "Found `num_iterations` in params" warning in the training log);
        # both were redundant no-ops and are dropped.
    )
    clf.fit(train_x, train_y,
            eval_set=[(train_x, train_y), (test_x, test_y)],
            eval_metric='auc',
            early_stopping_rounds=100)
    print(clf.n_features_)

    return clf, clf.best_score_['valid_1']['auc']
# Five-fold, time-ordered cross-validation: each fold in turn is held out as
# the test set while the other four folds train the model.
feature_lst = {}
ks_train_lst = []
ks_test_lst = []
for rk in set(df_train['rank']):

    # Fold `rk` is held out; the remaining folds form the training set
    ttest = df_train[df_train['rank'] == rk]
    ttrain = df_train[df_train['rank'] != rk]

    train = ttrain[lst]
    train_y = ttrain.bad_ind

    test = ttest[lst]
    test_y = ttest.bad_ind

    # `val_auc` avoids shadowing sklearn's `auc` imported at the top of the file
    model, val_auc = LGB_test(train, train_y, test, test_y)

    # Feature importances for this fold, largest first
    feature = pd.DataFrame(
                {'name': model.booster_.feature_name(),
                 'importance': model.feature_importances_
                }).sort_values(by=['importance'], ascending=False)

    # KS and AUC on the fold's train / test splits
    y_pred_train_lgb = model.predict_proba(train)[:, 1]
    y_pred_test_lgb = model.predict_proba(test)[:, 1]

    train_fpr_lgb, train_tpr_lgb, _ = roc_curve(train_y, y_pred_train_lgb)
    test_fpr_lgb, test_tpr_lgb, _ = roc_curve(test_y, y_pred_test_lgb)

    # KS statistic = maximum vertical gap between the TPR and FPR curves
    train_ks = abs(train_fpr_lgb - train_tpr_lgb).max()
    test_ks = abs(test_fpr_lgb - test_tpr_lgb).max()

    train_auc = metrics.auc(train_fpr_lgb, train_tpr_lgb)
    test_auc = metrics.auc(test_fpr_lgb, test_tpr_lgb)

    ks_train_lst.append(train_ks)
    ks_test_lst.append(test_ks)

    # Keep only the features with importance >= 20 for this fold
    feature_lst[str(rk)] = feature[feature.importance >= 20].name

# Average KS across the five folds
train_ks = np.mean(ks_train_lst)
test_ks = np.mean(ks_test_lst)

# Features that cleared the importance threshold in every single fold
ft_lst = {}
for i in range(1, 6):
    ft_lst[str(i)] = feature_lst[str(i)]

fn_lst = list(set(ft_lst['1']) & set(ft_lst['2'])
    & set(ft_lst['3']) & set(ft_lst['4']) & set(ft_lst['5']))

print('train_ks: ', train_ks)
print('test_ks: ', test_ks)

print('ft_lst: ', fn_lst)
E:\Anaconda3\envs\sklearn\lib\site-packages\lightgbm\engine.py:116: UserWarning: Found `num_iterations` in params. Will use it instead of argument
  warnings.warn("Found `{}` in params. Will use it instead of argument".format(alias))


[1] training's auc: 0.671177    training's binary_logloss: 0.0837149    valid_1's auc: 0.593289 valid_1's binary_logloss: 0.122621
Training until validation scores don't improve for 100 rounds.
[2] training's auc: 0.790094    training's binary_logloss: 0.0824434    valid_1's auc: 0.748081 valid_1's binary_logloss: 0.120209
[3] training's auc: 0.794322    training's binary_logloss: 0.0819485    valid_1's auc: 0.769838 valid_1's binary_logloss: 0.119323
[4] training's auc: 0.793401    training's binary_logloss: 0.08136  valid_1's auc: 0.766694 valid_1's binary_logloss: 0.11835
[5] training's auc: 0.79442 training's binary_logloss: 0.0805306    valid_1's auc: 0.770955 valid_1's binary_logloss: 0.116824
[6] training's auc: 0.799359    training's binary_logloss: 0.0797782    valid_1's auc: 0.777701 valid_1's binary_logloss: 0.115472
[7] training's auc: 0.7986  training's binary_logloss: 0.0793654    valid_1's auc: 0.77792  valid_1's binary_logloss: 0.114819
[8] training's auc: 0.80116 training's binary_logloss: 0.0787197    valid_1's auc: 0.779639 valid_1's binary_logloss: 0.113766
[9] training's auc: 0.804811    training's binary_logloss: 0.0784944    valid_1's auc: 0.776231 valid_1's binary_logloss: 0.113544
[10]    training's auc: 0.803304    training's binary_logloss: 0.0781703    valid_1's auc: 0.774568 valid_1's binary_logloss: 0.113122
[11]    training's auc: 0.804755    training's binary_logloss: 0.0777157    valid_1's auc: 0.778525 valid_1's binary_logloss: 0.11235
[12]    training's auc: 0.80572 training's binary_logloss: 0.077203 valid_1's auc: 0.780386 valid_1's binary_logloss: 0.111528
[13]    training's auc: 0.805872    training's binary_logloss: 0.0769671    valid_1's auc: 0.780168 valid_1's binary_logloss: 0.111249
[14]    training's auc: 0.804747    training's binary_logloss: 0.0767342    valid_1's auc: 0.776101 valid_1's binary_logloss: 0.110985
[15]    training's auc: 0.806523    training's binary_logloss: 0.0763647    valid_1's auc: 0.776495 valid_1's binary_logloss: 0.110407
[16]    training's auc: 0.807263    training's binary_logloss: 0.0760476    valid_1's auc: 0.777377 valid_1's binary_logloss: 0.109954
[17]    training's auc: 0.807666    training's binary_logloss: 0.0758342    valid_1's auc: 0.778523 valid_1's binary_logloss: 0.109683
[18]    training's auc: 0.808026    training's binary_logloss: 0.0755496    valid_1's auc: 0.782706 valid_1's binary_logloss: 0.109231
[19]    training's auc: 0.807956    training's binary_logloss: 0.0752868    valid_1's auc: 0.782144 valid_1's binary_logloss: 0.108865
[20]    training's auc: 0.808187    training's binary_logloss: 0.07511  valid_1's auc: 0.781477 valid_1's binary_logloss: 0.10868
[21]    training's auc: 0.808443    training's binary_logloss: 0.0749012    valid_1's auc: 0.781449 valid_1's binary_logloss: 0.10841
[22]    training's auc: 0.80858 training's binary_logloss: 0.0746929    valid_1's auc: 0.781784 valid_1's binary_logloss: 0.108141
[23]    training's auc: 0.808537    training's binary_logloss: 0.0745555    valid_1's auc: 0.781804 valid_1's binary_logloss: 0.107996
[24]    training's auc: 0.808534    training's binary_logloss: 0.0743843    valid_1's auc: 0.782433 valid_1's binary_logloss: 0.107742
[25]    training's auc: 0.80871 training's binary_logloss: 0.074262 valid_1's auc: 0.782362 valid_1's binary_logloss: 0.10764
[26]    training's auc: 0.808796    training's binary_logloss: 0.0741263    valid_1's auc: 0.782014 valid_1's binary_logloss: 0.107489
[27]    training's auc: 0.809004    training's binary_logloss: 0.0739832    valid_1's auc: 0.783511 valid_1's binary_logloss: 0.107278
[28]    training's auc: 0.810629    training's binary_logloss: 0.0738869    valid_1's auc: 0.783018 valid_1's binary_logloss: 0.107224
[29]    training's auc: 0.811459    training's binary_logloss: 0.0737598    valid_1's auc: 0.783637 valid_1's binary_logloss: 0.107058
[30]    training's auc: 0.812163    training's binary_logloss: 0.0736609    valid_1's auc: 0.784984 valid_1's binary_logloss: 0.10695
[31]    training's auc: 0.811803    training's binary_logloss: 0.0735418    valid_1's auc: 0.784155 valid_1's binary_logloss: 0.106847
[32]    training's auc: 0.812089    training's binary_logloss: 0.0734718    valid_1's auc: 0.784006 valid_1's binary_logloss: 0.106825
[33]    training's auc: 0.81212 training's binary_logloss: 0.0733773    valid_1's auc: 0.784219 valid_1's binary_logloss: 0.106737
[34]    training's auc: 0.812082    training's binary_logloss: 0.0733098    valid_1's auc: 0.784139 valid_1's binary_logloss: 0.106721
[35]    training's auc: 0.812312    training's binary_logloss: 0.0732273    valid_1's auc: 0.783888 valid_1's binary_logloss: 0.106653
[36]    training's auc: 0.81221 training's binary_logloss: 0.0731534    valid_1's auc: 0.783731 valid_1's binary_logloss: 0.106591
[37]    training's auc: 0.81269 training's binary_logloss: 0.0730713    valid_1's auc: 0.784174 valid_1's binary_logloss: 0.106514
[38]    training's auc: 0.812679    training's binary_logloss: 0.072993 valid_1's auc: 0.78418  valid_1's binary_logloss: 0.106475
[39]    training's auc: 0.813185    training's binary_logloss: 0.0729372    valid_1's auc: 0.784127 valid_1's binary_logloss: 0.106471
[40]    training's auc: 0.813126    training's binary_logloss: 0.0728752    valid_1's auc: 0.784373 valid_1's binary_logloss: 0.106428
[41]    training's auc: 0.813399    training's binary_logloss: 0.0728172    valid_1's auc: 0.784511 valid_1's binary_logloss: 0.106386
[42]    training's auc: 0.813647    training's binary_logloss: 0.0727682    valid_1's auc: 0.784372 valid_1's binary_logloss: 0.106363
[43]    training's auc: 0.813684    training's binary_logloss: 0.0727066    valid_1's auc: 0.784718 valid_1's binary_logloss: 0.106314
[44]    training's auc: 0.813822    training's binary_logloss: 0.0726594    valid_1's auc: 0.7847   valid_1's binary_logloss: 0.106274
[45]    training's auc: 0.814282    training's binary_logloss: 0.0726142    valid_1's auc: 0.784598 valid_1's binary_logloss: 0.106242
[46]    training's auc: 0.814422    training's binary_logloss: 0.0725718    valid_1's auc: 0.784624 valid_1's binary_logloss: 0.106222
[47]    training's auc: 0.814488    training's binary_logloss: 0.0725269    valid_1's auc: 0.784789 valid_1's binary_logloss: 0.106203
[48]    training's auc: 0.814764    training's binary_logloss: 0.0724902    valid_1's auc: 0.785033 valid_1's binary_logloss: 0.106166
[49]    training's auc: 0.814857    training's binary_logloss: 0.0724551    valid_1's auc: 0.785419 valid_1's binary_logloss: 0.106146
[50]    training's auc: 0.814942    training's binary_logloss: 0.0724262    valid_1's auc: 0.785531 valid_1's binary_logloss: 0.106129
[51]    training's auc: 0.81514 training's binary_logloss: 0.0723931    valid_1's auc: 0.785477 valid_1's binary_logloss: 0.106135
[52]    training's auc: 0.815228    training's binary_logloss: 0.0723504    valid_1's auc: 0.785603 valid_1's binary_logloss: 0.106082
[53]    training's auc: 0.815361    training's binary_logloss: 0.0723331    valid_1's auc: 0.785824 valid_1's binary_logloss: 0.106057
[54]    training's auc: 0.815387    training's binary_logloss: 0.0723119    valid_1's auc: 0.78613  valid_1's binary_logloss: 0.105999
[55]    training's auc: 0.815514    training's binary_logloss: 0.0722825    valid_1's auc: 0.785812 valid_1's binary_logloss: 0.105995
[56]    training's auc: 0.81556 training's binary_logloss: 0.0722503    valid_1's auc: 0.785934 valid_1's binary_logloss: 0.105974
[57]    training's auc: 0.815588    training's binary_logloss: 0.0722242    valid_1's auc: 0.785955 valid_1's binary_logloss: 0.105976
[58]    training's auc: 0.815732    training's binary_logloss: 0.0721906    valid_1's auc: 0.78627  valid_1's binary_logloss: 0.105962
[59]    training's auc: 0.815706    training's binary_logloss: 0.0721687    valid_1's auc: 0.786258 valid_1's binary_logloss: 0.105951
[60]    training's auc: 0.815718    training's binary_logloss: 0.0721445    valid_1's auc: 0.786547 valid_1's binary_logloss: 0.105923
[61]    training's auc: 0.815734    training's binary_logloss: 0.0721223    valid_1's auc: 0.786568 valid_1's binary_logloss: 0.105923
[62]    training's auc: 0.815961    training's binary_logloss: 0.0720899    valid_1's auc: 0.786347 valid_1's binary_logloss: 0.105958
[63]    training's auc: 0.815941    training's binary_logloss: 0.072071 valid_1's auc: 0.786343 valid_1's binary_logloss: 0.105958
[64]    training's auc: 0.816068    training's binary_logloss: 0.0720548    valid_1's auc: 0.786568 valid_1's binary_logloss: 0.105948
[65]    training's auc: 0.81616 training's binary_logloss: 0.0720343    valid_1's auc: 0.786472 valid_1's binary_logloss: 0.105956
[66]    training's auc: 0.816269    training's binary_logloss: 0.0720188    valid_1's auc: 0.786689 valid_1's binary_logloss: 0.105938
[67]    training's auc: 0.816373    training's binary_logloss: 0.0719948    valid_1's auc: 0.786295 valid_1's binary_logloss: 0.105967
[68]    training's auc: 0.816425    training's binary_logloss: 0.0719716    valid_1's auc: 0.786729 valid_1's binary_logloss: 0.105935
[69]    training's auc: 0.816552    training's binary_logloss: 0.0719538    valid_1's auc: 0.786615 valid_1's binary_logloss: 0.105947
[70]    training's auc: 0.816446    training's binary_logloss: 0.071935 valid_1's auc: 0.786604 valid_1's binary_logloss: 0.105956
[71]    training's auc: 0.816411    training's binary_logloss: 0.0719215    valid_1's auc: 0.78663  valid_1's binary_logloss: 0.105947
[72]    training's auc: 0.816383    training's binary_logloss: 0.0719075    valid_1's auc: 0.786761 valid_1's binary_logloss: 0.105954
[73]    training's auc: 0.816402    training's binary_logloss: 0.071894 valid_1's auc: 0.786845 valid_1's binary_logloss: 0.105942
[74]    training's auc: 0.816524    training's binary_logloss: 0.0718747    valid_1's auc: 0.78676  valid_1's binary_logloss: 0.105952
[75]    training's auc: 0.816573    training's binary_logloss: 0.0718592    valid_1's auc: 0.786748 valid_1's binary_logloss: 0.105953
[76]    training's auc: 0.816671    training's binary_logloss: 0.0718444    valid_1's auc: 0.786642 valid_1's binary_logloss: 0.105978
[77]    training's auc: 0.816744    training's binary_logloss: 0.0718337    valid_1's auc: 0.786565 valid_1's binary_logloss: 0.105976
[78]    training's auc: 0.816932    training's binary_logloss: 0.0718212    valid_1's auc: 0.786543 valid_1's binary_logloss: 0.105991
[79]    training's auc: 0.816983    training's binary_logloss: 0.0718046    valid_1's auc: 0.78653  valid_1's binary_logloss: 0.105997
[80]    training's auc: 0.81722 training's binary_logloss: 0.0717917    valid_1's auc: 0.786595 valid_1's binary_logloss: 0.106016
[81]    training's auc: 0.817299    training's binary_logloss: 0.0717716    valid_1's auc: 0.786638 valid_1's binary_logloss: 0.106026
[82]    training's auc: 0.817325    training's binary_logloss: 0.0717643    valid_1's auc: 0.786818 valid_1's binary_logloss: 0.106014
[83]    training's auc: 0.817473    training's binary_logloss: 0.0717534    valid_1's auc: 0.786651 valid_1's binary_logloss: 0.106037
[84]    training's auc: 0.817696    training's binary_logloss: 0.0717374    valid_1's auc: 0.786501 valid_1's binary_logloss: 0.106077
[85]    training's auc: 0.817887    training's binary_logloss: 0.0717207    valid_1's auc: 0.786448 valid_1's binary_logloss: 0.106083
[86]    training's auc: 0.817972    training's binary_logloss: 0.0717088    valid_1's auc: 0.78615  valid_1's binary_logloss: 0.106111
[87]    training's auc: 0.81801 training's binary_logloss: 0.0716979    valid_1's auc: 0.786182 valid_1's binary_logloss: 0.106116
[88]    training's auc: 0.818042    training's binary_logloss: 0.0716852    valid_1's auc: 0.786034 valid_1's binary_logloss: 0.106152
[89]    training's auc: 0.818076    training's binary_logloss: 0.0716731    valid_1's auc: 0.785794 valid_1's binary_logloss: 0.10618
[90]    training's auc: 0.818089    training's binary_logloss: 0.0716662    valid_1's auc: 0.785778 valid_1's binary_logloss: 0.106185
[91]    training's auc: 0.818269    training's binary_logloss: 0.0716563    valid_1's auc: 0.785701 valid_1's binary_logloss: 0.106191
[92]    training's auc: 0.818316    training's binary_logloss: 0.0716505    valid_1's auc: 0.785694 valid_1's binary_logloss: 0.10619
[93]    training's auc: 0.818289    training's binary_logloss: 0.0716419    valid_1's auc: 0.7855   valid_1's binary_logloss: 0.106214
[94]    training's auc: 0.818372    training's binary_logloss: 0.0716302    valid_1's auc: 0.785272 valid_1's binary_logloss: 0.106236
[95]    training's auc: 0.818424    training's binary_logloss: 0.0716234    valid_1's auc: 0.785335 valid_1's binary_logloss: 0.106238
[96]    training's auc: 0.818421    training's binary_logloss: 0.0716175    valid_1's auc: 0.785293 valid_1's binary_logloss: 0.106244
[97]    training's auc: 0.818421    training's binary_logloss: 0.071611 valid_1's auc: 0.785156 valid_1's binary_logloss: 0.10626
[98]    training's auc: 0.818456    training's binary_logloss: 0.0716046    valid_1's auc: 0.78505  valid_1's binary_logloss: 0.10627
[99]    training's auc: 0.818469    training's binary_logloss: 0.0715997    valid_1's auc: 0.785019 valid_1's binary_logloss: 0.106281
[100]   training's auc: 0.818453    training's binary_logloss: 0.0715917    valid_1's auc: 0.785081 valid_1's binary_logloss: 0.106274
[101]   training's auc: 0.818465    training's binary_logloss: 0.0715882    valid_1's auc: 0.785049 valid_1's binary_logloss: 0.10628
[102]   training's auc: 0.818512    training's binary_logloss: 0.0715827    valid_1's auc: 0.785139 valid_1's binary_logloss: 0.106266
[103]   training's auc: 0.818635    training's binary_logloss: 0.0715739    valid_1's auc: 0.785213 valid_1's binary_logloss: 0.106275
[104]   training's auc: 0.818576    training's binary_logloss: 0.0715629    valid_1's auc: 0.785026 valid_1's binary_logloss: 0.106282
[105]   training's auc: 0.818638    training's binary_logloss: 0.0715508    valid_1's auc: 0.785035 valid_1's binary_logloss: 0.106293
[106]   training's auc: 0.818756    training's binary_logloss: 0.0715429    valid_1's auc: 0.785193 valid_1's binary_logloss: 0.10628
[107]   training's auc: 0.818777    training's binary_logloss: 0.0715386    valid_1's auc: 0.78513  valid_1's binary_logloss: 0.106284
[108]   training's auc: 0.81883 training's binary_logloss: 0.0715356    valid_1's auc: 0.784956 valid_1's binary_logloss: 0.106299
[109]   training's auc: 0.81888 training's binary_logloss: 0.0715297    valid_1's auc: 0.785038 valid_1's binary_logloss: 0.106295
[110]   training's auc: 0.818889    training's binary_logloss: 0.0715239    valid_1's auc: 0.78495  valid_1's binary_logloss: 0.106304
[111]   training's auc: 0.818948    training's binary_logloss: 0.0715186    valid_1's auc: 0.784973 valid_1's binary_logloss: 0.106305
[112]   training's auc: 0.819003    training's binary_logloss: 0.071514 valid_1's auc: 0.784907 valid_1's binary_logloss: 0.106323
[113]   training's auc: 0.819074    training's binary_logloss: 0.0715064    valid_1's auc: 0.784838 valid_1's binary_logloss: 0.106329
[114]   training's auc: 0.819102    training's binary_logloss: 0.0715031    valid_1's auc: 0.784872 valid_1's binary_logloss: 0.106339
[115]   training's auc: 0.819181    training's binary_logloss: 0.0714946    valid_1's auc: 0.784782 valid_1's binary_logloss: 0.106345
[116]   training's auc: 0.819211    training's binary_logloss: 0.0714871    valid_1's auc: 0.784749 valid_1's binary_logloss: 0.106341
[117]   training's auc: 0.819215    training's binary_logloss: 0.0714807    valid_1's auc: 0.784544 valid_1's binary_logloss: 0.106372
[118]   training's auc: 0.81929 training's binary_logloss: 0.0714729    valid_1's auc: 0.784592 valid_1's binary_logloss: 0.106371
[119]   training's auc: 0.819368    training's binary_logloss: 0.0714673    valid_1's auc: 0.784579 valid_1's binary_logloss: 0.10637
[120]   training's auc: 0.819413    training's binary_logloss: 0.0714624    valid_1's auc: 0.784451 valid_1's binary_logloss: 0.106387
[121]   training's auc: 0.819462    training's binary_logloss: 0.071455 valid_1's auc: 0.784657 valid_1's binary_logloss: 0.106364
[122]   training's auc: 0.819496    training's binary_logloss: 0.0714507    valid_1's auc: 0.784709 valid_1's binary_logloss: 0.106369
[123]   training's auc: 0.81958 training's binary_logloss: 0.0714443    valid_1's auc: 0.784624 valid_1's binary_logloss: 0.106382
[124]   training's auc: 0.819635    training's binary_logloss: 0.0714367    valid_1's auc: 0.784379 valid_1's binary_logloss: 0.106401
[125]   training's auc: 0.81967 training's binary_logloss: 0.0714324    valid_1's auc: 0.784237 valid_1's binary_logloss: 0.106417
[126]   training's auc: 0.819719    training's binary_logloss: 0.0714271    valid_1's auc: 0.784327 valid_1's binary_logloss: 0.106417
[127]   training's auc: 0.819772    training's binary_logloss: 0.0714205    valid_1's auc: 0.784247 valid_1's binary_logloss: 0.106427
[128]   training's auc: 0.819792    training's binary_logloss: 0.071417 valid_1's auc: 0.784253 valid_1's binary_logloss: 0.10643
[129]   training's auc: 0.819848    training's binary_logloss: 0.0714141    valid_1's auc: 0.783987 valid_1's binary_logloss: 0.10645
[130]   training's auc: 0.81994 training's binary_logloss: 0.0714057    valid_1's auc: 0.784056 valid_1's binary_logloss: 0.106441
[131]   training's auc: 0.81996 training's binary_logloss: 0.0714018    valid_1's auc: 0.784088 valid_1's binary_logloss: 0.106439
[132]   training's auc: 0.819989    training's binary_logloss: 0.0713864    valid_1's auc: 0.784202 valid_1's binary_logloss: 0.106422
[133]   training's auc: 0.820084    training's binary_logloss: 0.0713787    valid_1's auc: 0.784184 valid_1's binary_logloss: 0.106438
[134]   training's auc: 0.82018 training's binary_logloss: 0.0713551    valid_1's auc: 0.784649 valid_1's binary_logloss: 0.106403
[135]   training's auc: 0.820187    training's binary_logloss: 0.0713526    valid_1's auc: 0.784664 valid_1's binary_logloss: 0.106395
[136]   training's auc: 0.820236    training's binary_logloss: 0.0713442    valid_1's auc: 0.7846   valid_1's binary_logloss: 0.106402
[137]   training's auc: 0.820281    training's binary_logloss: 0.07134  valid_1's auc: 0.784467 valid_1's binary_logloss: 0.106425
[138]   training's auc: 0.82031 training's binary_logloss: 0.0713346    valid_1's auc: 0.784498 valid_1's binary_logloss: 0.106406
[139]   training's auc: 0.820342    training's binary_logloss: 0.071329 valid_1's auc: 0.784551 valid_1's binary_logloss: 0.106404
[140]   training's auc: 0.820391    training's binary_logloss: 0.0713228    valid_1's auc: 0.784492 valid_1's binary_logloss: 0.106409
[141]   training's auc: 0.820472    training's binary_logloss: 0.0713172    valid_1's auc: 0.784594 valid_1's binary_logloss: 0.106407
[142]   training's auc: 0.820561    training's binary_logloss: 0.0713113    valid_1's auc: 0.784403 valid_1's binary_logloss: 0.106431
[143]   training's auc: 0.820572    training's binary_logloss: 0.0713074    valid_1's auc: 0.784214 valid_1's binary_logloss: 0.106449
[144]   training's auc: 0.820633    training's binary_logloss: 0.0713037    valid_1's auc: 0.784233 valid_1's binary_logloss: 0.106444
[145]   training's auc: 0.820673    training's binary_logloss: 0.0712998    valid_1's auc: 0.78431  valid_1's binary_logloss: 0.106438
[146]   training's auc: 0.820693    training's binary_logloss: 0.071294 valid_1's auc: 0.784178 valid_1's binary_logloss: 0.106461
[147]   training's auc: 0.820742    training's binary_logloss: 0.0712881    valid_1's auc: 0.784215 valid_1's binary_logloss: 0.106464
[148]   training's auc: 0.820745    training's binary_logloss: 0.0712848    valid_1's auc: 0.784344 valid_1's binary_logloss: 0.106447
[149]   training's auc: 0.82079 training's binary_logloss: 0.0712808    valid_1's auc: 0.784314 valid_1's binary_logloss: 0.106459
[150]   training's auc: 0.820794    training's binary_logloss: 0.0712769    valid_1's auc: 0.784297 valid_1's binary_logloss: 0.106463
[151]   training's auc: 0.820821    training's binary_logloss: 0.0712722    valid_1's auc: 0.784535 valid_1's binary_logloss: 0.10644
[152]   training's auc: 0.820869    training's binary_logloss: 0.0712653    valid_1's auc: 0.78438  valid_1's binary_logloss: 0.106455
[153]   training's auc: 0.82085 training's binary_logloss: 0.0712634    valid_1's auc: 0.784364 valid_1's binary_logloss: 0.106461
[154]   training's auc: 0.82092 training's binary_logloss: 0.0712579    valid_1's auc: 0.784313 valid_1's binary_logloss: 0.106469
[155]   training's auc: 0.820927    training's binary_logloss: 0.0712541    valid_1's auc: 0.78425  valid_1's binary_logloss: 0.106471
[156]   training's auc: 0.821014    training's binary_logloss: 0.0712325    valid_1's auc: 0.784621 valid_1's binary_logloss: 0.106439
[157]   training's auc: 0.821049    training's binary_logloss: 0.0712301    valid_1's auc: 0.784657 valid_1's binary_logloss: 0.106439
[158]   training's auc: 0.821087    training's binary_logloss: 0.0712243    valid_1's auc: 0.784842 valid_1's binary_logloss: 0.106423
[159]   training's auc: 0.821106    training's binary_logloss: 0.0712181    valid_1's auc: 0.78503  valid_1's binary_logloss: 0.106405
[160]   training's auc: 0.821107    training's binary_logloss: 0.0712141    valid_1's auc: 0.784844 valid_1's binary_logloss: 0.106419
[161]   training's auc: 0.82119 training's binary_logloss: 0.0712073    valid_1's auc: 0.784637 valid_1's binary_logloss: 0.106434
Early stopping, best iteration is:
[61]    training's auc: 0.815734    training's binary_logloss: 0.0721223    valid_1's auc: 0.786568 valid_1's binary_logloss: 0.105923
8
[1] training's auc: 0.665367    training's binary_logloss: 0.0868856    valid_1's auc: 0.638034 valid_1's binary_logloss: 0.109915
Training until validation scores don't improve for 100 rounds.
[2] training's auc: 0.791507    training's binary_logloss: 0.0855856    valid_1's auc: 0.778478 valid_1's binary_logloss: 0.108111
[3] training's auc: 0.795061    training's binary_logloss: 0.0850129    valid_1's auc: 0.782413 valid_1's binary_logloss: 0.107401
[4] training's auc: 0.793479    training's binary_logloss: 0.0844315    valid_1's auc: 0.778869 valid_1's binary_logloss: 0.106679
[5] training's auc: 0.801927    training's binary_logloss: 0.0834844    valid_1's auc: 0.787463 valid_1's binary_logloss: 0.10532
[6] training's auc: 0.800946    training's binary_logloss: 0.0827504    valid_1's auc: 0.785723 valid_1's binary_logloss: 0.104338
[7] training's auc: 0.800642    training's binary_logloss: 0.0822815    valid_1's auc: 0.785003 valid_1's binary_logloss: 0.103808
[8] training's auc: 0.802571    training's binary_logloss: 0.0816004    valid_1's auc: 0.787851 valid_1's binary_logloss: 0.102893
[9] training's auc: 0.804417    training's binary_logloss: 0.0814074    valid_1's auc: 0.789965 valid_1's binary_logloss: 0.102667
[10]    training's auc: 0.804715    training's binary_logloss: 0.0810815    valid_1's auc: 0.789315 valid_1's binary_logloss: 0.102308
[11]    training's auc: 0.80572 training's binary_logloss: 0.0805795    valid_1's auc: 0.790888 valid_1's binary_logloss: 0.101646
[12]    training's auc: 0.805909    training's binary_logloss: 0.0800685    valid_1's auc: 0.791102 valid_1's binary_logloss: 0.101022
[13]    training's auc: 0.805148    training's binary_logloss: 0.0798425    valid_1's auc: 0.789926 valid_1's binary_logloss: 0.100788
[14]    training's auc: 0.805025    training's binary_logloss: 0.0796195    valid_1's auc: 0.788701 valid_1's binary_logloss: 0.100546
[15]    training's auc: 0.806107    training's binary_logloss: 0.0792353    valid_1's auc: 0.790085 valid_1's binary_logloss: 0.100016
[16]    training's auc: 0.80629 training's binary_logloss: 0.0788918    valid_1's auc: 0.790386 valid_1's binary_logloss: 0.0995958
[17]    training's auc: 0.806259    training's binary_logloss: 0.0786638    valid_1's auc: 0.790254 valid_1's binary_logloss: 0.0993615
[18]    training's auc: 0.806691    training's binary_logloss: 0.0783631    valid_1's auc: 0.790354 valid_1's binary_logloss: 0.0990228
[19]    training's auc: 0.80746 training's binary_logloss: 0.0781036    valid_1's auc: 0.791374 valid_1's binary_logloss: 0.0987136
[20]    training's auc: 0.80775 training's binary_logloss: 0.0779183    valid_1's auc: 0.791057 valid_1's binary_logloss: 0.0985308
[21]    training's auc: 0.808059    training's binary_logloss: 0.0776847    valid_1's auc: 0.79161  valid_1's binary_logloss: 0.098227
[22]    training's auc: 0.807976    training's binary_logloss: 0.0774765    valid_1's auc: 0.791876 valid_1's binary_logloss: 0.0979735
[23]    training's auc: 0.808149    training's binary_logloss: 0.0773444    valid_1's auc: 0.792268 valid_1's binary_logloss: 0.0978359
[24]    training's auc: 0.808157    training's binary_logloss: 0.0771487    valid_1's auc: 0.791756 valid_1's binary_logloss: 0.0976358
[25]    training's auc: 0.808356    training's binary_logloss: 0.0770355    valid_1's auc: 0.791112 valid_1's binary_logloss: 0.0975479
[26]    training's auc: 0.808448    training's binary_logloss: 0.0768953    valid_1's auc: 0.791684 valid_1's binary_logloss: 0.097354
[27]    training's auc: 0.808386    training's binary_logloss: 0.0767449    valid_1's auc: 0.79175  valid_1's binary_logloss: 0.0972002
[28]    training's auc: 0.809099    training's binary_logloss: 0.0766654    valid_1's auc: 0.792508 valid_1's binary_logloss: 0.0971177
[29]    training's auc: 0.80913 training's binary_logloss: 0.0765278    valid_1's auc: 0.792793 valid_1's binary_logloss: 0.0969534
[30]    training's auc: 0.809349    training's binary_logloss: 0.0764214    valid_1's auc: 0.793102 valid_1's binary_logloss: 0.0968668
[31]    training's auc: 0.809617    training's binary_logloss: 0.0762925    valid_1's auc: 0.793769 valid_1's binary_logloss: 0.0967063
[32]    training's auc: 0.809557    training's binary_logloss: 0.076225 valid_1's auc: 0.793807 valid_1's binary_logloss: 0.0966626
[33]    training's auc: 0.810126    training's binary_logloss: 0.0761187    valid_1's auc: 0.794545 valid_1's binary_logloss: 0.0965436
[34]    training's auc: 0.809974    training's binary_logloss: 0.0760628    valid_1's auc: 0.794193 valid_1's binary_logloss: 0.0964899
[35]    training's auc: 0.810479    training's binary_logloss: 0.0759813    valid_1's auc: 0.794976 valid_1's binary_logloss: 0.0963861
[36]    training's auc: 0.810464    training's binary_logloss: 0.075896 valid_1's auc: 0.795457 valid_1's binary_logloss: 0.0962777
[37]    training's auc: 0.810701    training's binary_logloss: 0.0758081    valid_1's auc: 0.795639 valid_1's binary_logloss: 0.0961879
[38]    training's auc: 0.810869    training's binary_logloss: 0.0757245    valid_1's auc: 0.795936 valid_1's binary_logloss: 0.0961019
[39]    training's auc: 0.811406    training's binary_logloss: 0.0756708    valid_1's auc: 0.796704 valid_1's binary_logloss: 0.0960417
[40]    training's auc: 0.811464    training's binary_logloss: 0.0756043    valid_1's auc: 0.797168 valid_1's binary_logloss: 0.095968
[41]    training's auc: 0.811443    training's binary_logloss: 0.0755448    valid_1's auc: 0.797791 valid_1's binary_logloss: 0.0958963
[42]    training's auc: 0.81172 training's binary_logloss: 0.0754894    valid_1's auc: 0.798391 valid_1's binary_logloss: 0.09584
[43]    training's auc: 0.811679    training's binary_logloss: 0.0754119    valid_1's auc: 0.79814  valid_1's binary_logloss: 0.0957827
[44]    training's auc: 0.811701    training's binary_logloss: 0.0753675    valid_1's auc: 0.798242 valid_1's binary_logloss: 0.0957432
[45]    training's auc: 0.81173 training's binary_logloss: 0.0753183    valid_1's auc: 0.798303 valid_1's binary_logloss: 0.0957024
[46]    training's auc: 0.811994    training's binary_logloss: 0.0752779    valid_1's auc: 0.798184 valid_1's binary_logloss: 0.0956843
[47]    training's auc: 0.811946    training's binary_logloss: 0.0752103    valid_1's auc: 0.798203 valid_1's binary_logloss: 0.0956371
[48]    training's auc: 0.812328    training's binary_logloss: 0.0751747    valid_1's auc: 0.798574 valid_1's binary_logloss: 0.0955971
[49]    training's auc: 0.81253 training's binary_logloss: 0.0751401    valid_1's auc: 0.798755 valid_1's binary_logloss: 0.0955682
[50]    training's auc: 0.812567    training's binary_logloss: 0.075102 valid_1's auc: 0.798895 valid_1's binary_logloss: 0.0955279
[51]    training's auc: 0.812545    training's binary_logloss: 0.0750742    valid_1's auc: 0.798918 valid_1's binary_logloss: 0.0955016
[52]    training's auc: 0.812577    training's binary_logloss: 0.0750226    valid_1's auc: 0.798909 valid_1's binary_logloss: 0.095471
[53]    training's auc: 0.812859    training's binary_logloss: 0.0750067    valid_1's auc: 0.798986 valid_1's binary_logloss: 0.0954679
[54]    training's auc: 0.8129  training's binary_logloss: 0.0749863    valid_1's auc: 0.799125 valid_1's binary_logloss: 0.0954408
[55]    training's auc: 0.813031    training's binary_logloss: 0.0749588    valid_1's auc: 0.799384 valid_1's binary_logloss: 0.0954081
[56]    training's auc: 0.812881    training's binary_logloss: 0.074927 valid_1's auc: 0.799338 valid_1's binary_logloss: 0.0953942
[57]    training's auc: 0.812888    training's binary_logloss: 0.0748955    valid_1's auc: 0.799291 valid_1's binary_logloss: 0.0953836
[58]    training's auc: 0.813015    training's binary_logloss: 0.0748659    valid_1's auc: 0.799475 valid_1's binary_logloss: 0.0953671
[59]    training's auc: 0.813111    training's binary_logloss: 0.0748398    valid_1's auc: 0.799842 valid_1's binary_logloss: 0.0953486
[60]    training's auc: 0.813214    training's binary_logloss: 0.0748196    valid_1's auc: 0.800158 valid_1's binary_logloss: 0.0953262
[61]    training's auc: 0.813369    training's binary_logloss: 0.0747917    valid_1's auc: 0.80028  valid_1's binary_logloss: 0.0953142
[62]    training's auc: 0.813347    training's binary_logloss: 0.074751 valid_1's auc: 0.800148 valid_1's binary_logloss: 0.0952901
[63]    training's auc: 0.813447    training's binary_logloss: 0.0747295    valid_1's auc: 0.800345 valid_1's binary_logloss: 0.0952695
[64]    training's auc: 0.813481    training's binary_logloss: 0.0747062    valid_1's auc: 0.80039  valid_1's binary_logloss: 0.0952386
[65]    training's auc: 0.813693    training's binary_logloss: 0.0746838    valid_1's auc: 0.800635 valid_1's binary_logloss: 0.0952214
[66]    training's auc: 0.81388 training's binary_logloss: 0.0746691    valid_1's auc: 0.800462 valid_1's binary_logloss: 0.0952094
[67]    training's auc: 0.814062    training's binary_logloss: 0.0746476    valid_1's auc: 0.800544 valid_1's binary_logloss: 0.0951982
[68]    training's auc: 0.814117    training's binary_logloss: 0.074624 valid_1's auc: 0.800664 valid_1's binary_logloss: 0.0951826
[69]    training's auc: 0.814249    training's binary_logloss: 0.0746072    valid_1's auc: 0.800589 valid_1's binary_logloss: 0.0951832
[70]    training's auc: 0.814288    training's binary_logloss: 0.0745909    valid_1's auc: 0.800656 valid_1's binary_logloss: 0.0951881
[71]    training's auc: 0.814359    training's binary_logloss: 0.074577 valid_1's auc: 0.800828 valid_1's binary_logloss: 0.0951731
[72]    training's auc: 0.814241    training's binary_logloss: 0.0745619    valid_1's auc: 0.80094  valid_1's binary_logloss: 0.0951546
[73]    training's auc: 0.814325    training's binary_logloss: 0.0745471    valid_1's auc: 0.800995 valid_1's binary_logloss: 0.0951633
[74]    training's auc: 0.81452 training's binary_logloss: 0.0745357    valid_1's auc: 0.801002 valid_1's binary_logloss: 0.0951605
[75]    training's auc: 0.814656    training's binary_logloss: 0.0745232    valid_1's auc: 0.801098 valid_1's binary_logloss: 0.095154
[76]    training's auc: 0.814743    training's binary_logloss: 0.0745071    valid_1's auc: 0.801275 valid_1's binary_logloss: 0.0951344
[77]    training's auc: 0.81473 training's binary_logloss: 0.0744907    valid_1's auc: 0.80116  valid_1's binary_logloss: 0.0951438
[78]    training's auc: 0.814767    training's binary_logloss: 0.0744837    valid_1's auc: 0.801177 valid_1's binary_logloss: 0.0951393
[79]    training's auc: 0.814784    training's binary_logloss: 0.0744692    valid_1's auc: 0.801294 valid_1's binary_logloss: 0.0951311
[80]    training's auc: 0.814813    training's binary_logloss: 0.0744599    valid_1's auc: 0.801402 valid_1's binary_logloss: 0.0951247
[81]    training's auc: 0.814974    training's binary_logloss: 0.0744446    valid_1's auc: 0.801533 valid_1's binary_logloss: 0.0951186
[82]    training's auc: 0.815008    training's binary_logloss: 0.074433 valid_1's auc: 0.801598 valid_1's binary_logloss: 0.0951093
[83]    training's auc: 0.81512 training's binary_logloss: 0.0744136    valid_1's auc: 0.801794 valid_1's binary_logloss: 0.0950982
[84]    training's auc: 0.815284    training's binary_logloss: 0.0744   valid_1's auc: 0.801921 valid_1's binary_logloss: 0.0951031
[85]    training's auc: 0.81543 training's binary_logloss: 0.0743888    valid_1's auc: 0.801823 valid_1's binary_logloss: 0.0951
[86]    training's auc: 0.815413    training's binary_logloss: 0.0743766    valid_1's auc: 0.801803 valid_1's binary_logloss: 0.0950919
[87]    training's auc: 0.815411    training's binary_logloss: 0.0743599    valid_1's auc: 0.80168  valid_1's binary_logloss: 0.0950894
[88]    training's auc: 0.815474    training's binary_logloss: 0.0743497    valid_1's auc: 0.80175  valid_1's binary_logloss: 0.0950732
[89]    training's auc: 0.815529    training's binary_logloss: 0.0743353    valid_1's auc: 0.801893 valid_1's binary_logloss: 0.0950631
[90]    training's auc: 0.815556    training's binary_logloss: 0.0743267    valid_1's auc: 0.801935 valid_1's binary_logloss: 0.0950573
[91]    training's auc: 0.815756    training's binary_logloss: 0.0743137    valid_1's auc: 0.801872 valid_1's binary_logloss: 0.0950584
[92]    training's auc: 0.81582 training's binary_logloss: 0.0743069    valid_1's auc: 0.801957 valid_1's binary_logloss: 0.0950565
[93]    training's auc: 0.815822    training's binary_logloss: 0.0742966    valid_1's auc: 0.802232 valid_1's binary_logloss: 0.0950399
[94]    training's auc: 0.815967    training's binary_logloss: 0.0742838    valid_1's auc: 0.802182 valid_1's binary_logloss: 0.0950338
[95]    training's auc: 0.816057    training's binary_logloss: 0.0742736    valid_1's auc: 0.802218 valid_1's binary_logloss: 0.0950291
[96]    training's auc: 0.816071    training's binary_logloss: 0.0742607    valid_1's auc: 0.802331 valid_1's binary_logloss: 0.0950074
[97]    training's auc: 0.81615 training's binary_logloss: 0.074253 valid_1's auc: 0.802446 valid_1's binary_logloss: 0.095004
[98]    training's auc: 0.8161  training's binary_logloss: 0.074242 valid_1's auc: 0.802353 valid_1's binary_logloss: 0.0950067
[99]    training's auc: 0.816152    training's binary_logloss: 0.0742354    valid_1's auc: 0.802258 valid_1's binary_logloss: 0.0950091
[100]   training's auc: 0.816145    training's binary_logloss: 0.0742291    valid_1's auc: 0.802428 valid_1's binary_logloss: 0.0949979
[101]   training's auc: 0.816168    training's binary_logloss: 0.0742227    valid_1's auc: 0.80237  valid_1's binary_logloss: 0.0950027
[102]   training's auc: 0.81624 training's binary_logloss: 0.074216 valid_1's auc: 0.802414 valid_1's binary_logloss: 0.0949976
[103]   training's auc: 0.81639 training's binary_logloss: 0.074204 valid_1's auc: 0.802322 valid_1's binary_logloss: 0.095003
[104]   training's auc: 0.81646 training's binary_logloss: 0.074193 valid_1's auc: 0.802501 valid_1's binary_logloss: 0.0949953
[105]   training's auc: 0.816562    training's binary_logloss: 0.0741845    valid_1's auc: 0.802451 valid_1's binary_logloss: 0.0949929
[106]   training's auc: 0.816644    training's binary_logloss: 0.0741798    valid_1's auc: 0.802444 valid_1's binary_logloss: 0.0949924
[107]   training's auc: 0.816782    training's binary_logloss: 0.0741715    valid_1's auc: 0.802285 valid_1's binary_logloss: 0.0949931
[108]   training's auc: 0.816853    training's binary_logloss: 0.0741645    valid_1's auc: 0.802388 valid_1's binary_logloss: 0.0949899
[109]   training's auc: 0.816929    training's binary_logloss: 0.0741558    valid_1's auc: 0.802352 valid_1's binary_logloss: 0.09499
[110]   training's auc: 0.817009    training's binary_logloss: 0.0741498    valid_1's auc: 0.802288 valid_1's binary_logloss: 0.0949951
[111]   training's auc: 0.817059    training's binary_logloss: 0.0741419    valid_1's auc: 0.802315 valid_1's binary_logloss: 0.094996
[112]   training's auc: 0.817057    training's binary_logloss: 0.0741364    valid_1's auc: 0.802221 valid_1's binary_logloss: 0.0950033
[113]   training's auc: 0.817085    training's binary_logloss: 0.0741058    valid_1's auc: 0.802134 valid_1's binary_logloss: 0.0949912
[114]   training's auc: 0.817123    training's binary_logloss: 0.0741015    valid_1's auc: 0.802173 valid_1's binary_logloss: 0.0949877
[115]   training's auc: 0.817113    training's binary_logloss: 0.0740943    valid_1's auc: 0.802096 valid_1's binary_logloss: 0.0949946
[116]   training's auc: 0.817139    training's binary_logloss: 0.0740871    valid_1's auc: 0.80213  valid_1's binary_logloss: 0.094992
[117]   training's auc: 0.817159    training's binary_logloss: 0.0740785    valid_1's auc: 0.802129 valid_1's binary_logloss: 0.0949948
[118]   training's auc: 0.817272    training's binary_logloss: 0.0740693    valid_1's auc: 0.802091 valid_1's binary_logloss: 0.0949977
[119]   training's auc: 0.817387    training's binary_logloss: 0.0740614    valid_1's auc: 0.801905 valid_1's binary_logloss: 0.0950029
[120]   training's auc: 0.817514    training's binary_logloss: 0.0740524    valid_1's auc: 0.801835 valid_1's binary_logloss: 0.0950065
[121]   training's auc: 0.817639    training's binary_logloss: 0.074043 valid_1's auc: 0.801834 valid_1's binary_logloss: 0.0950134
[122]   training's auc: 0.817786    training's binary_logloss: 0.0740341    valid_1's auc: 0.801934 valid_1's binary_logloss: 0.0950115
[123]   training's auc: 0.817819    training's binary_logloss: 0.074026 valid_1's auc: 0.801927 valid_1's binary_logloss: 0.0950144
[124]   training's auc: 0.817866    training's binary_logloss: 0.0740188    valid_1's auc: 0.801899 valid_1's binary_logloss: 0.0950179
[125]   training's auc: 0.817929    training's binary_logloss: 0.0740085    valid_1's auc: 0.801989 valid_1's binary_logloss: 0.0950056
[126]   training's auc: 0.817945    training's binary_logloss: 0.0740053    valid_1's auc: 0.802016 valid_1's binary_logloss: 0.0950052
[127]   training's auc: 0.817971    training's binary_logloss: 0.0740011    valid_1's auc: 0.801981 valid_1's binary_logloss: 0.0950142
[128]   training's auc: 0.818047    training's binary_logloss: 0.0739978    valid_1's auc: 0.801979 valid_1's binary_logloss: 0.0950113
[129]   training's auc: 0.818142    training's binary_logloss: 0.073992 valid_1's auc: 0.80193  valid_1's binary_logloss: 0.095014
[130]   training's auc: 0.818194    training's binary_logloss: 0.073985 valid_1's auc: 0.801948 valid_1's binary_logloss: 0.0950175
[131]   training's auc: 0.818254    training's binary_logloss: 0.0739822    valid_1's auc: 0.801862 valid_1's binary_logloss: 0.0950211
[132]   training's auc: 0.818281    training's binary_logloss: 0.0739793    valid_1's auc: 0.801814 valid_1's binary_logloss: 0.0950347
[133]   training's auc: 0.818395    training's binary_logloss: 0.0739714    valid_1's auc: 0.80174  valid_1's binary_logloss: 0.0950386
[134]   training's auc: 0.818446    training's binary_logloss: 0.0739676    valid_1's auc: 0.801676 valid_1's binary_logloss: 0.0950468
[135]   training's auc: 0.818497    training's binary_logloss: 0.0739609    valid_1's auc: 0.801554 valid_1's binary_logloss: 0.0950441
[136]   training's auc: 0.818522    training's binary_logloss: 0.0739569    valid_1's auc: 0.801637 valid_1's binary_logloss: 0.0950404
[137]   training's auc: 0.81855 training's binary_logloss: 0.0739527    valid_1's auc: 0.801666 valid_1's binary_logloss: 0.09504
[138]   training's auc: 0.818545    training's binary_logloss: 0.073948 valid_1's auc: 0.801624 valid_1's binary_logloss: 0.095049
[139]   training's auc: 0.818598    training's binary_logloss: 0.0739444    valid_1's auc: 0.801586 valid_1's binary_logloss: 0.0950465
[140]   training's auc: 0.818629    training's binary_logloss: 0.0739346    valid_1's auc: 0.801645 valid_1's binary_logloss: 0.0950355
[141]   training's auc: 0.818692    training's binary_logloss: 0.0739312    valid_1's auc: 0.801564 valid_1's binary_logloss: 0.0950512
[142]   training's auc: 0.818662    training's binary_logloss: 0.0739265    valid_1's auc: 0.801566 valid_1's binary_logloss: 0.0950428
[143]   training's auc: 0.818677    training's binary_logloss: 0.0739241    valid_1's auc: 0.80156  valid_1's binary_logloss: 0.0950412
[144]   training's auc: 0.81866 training's binary_logloss: 0.0739217    valid_1's auc: 0.801581 valid_1's binary_logloss: 0.0950383
[145]   training's auc: 0.81869 training's binary_logloss: 0.073917 valid_1's auc: 0.801534 valid_1's binary_logloss: 0.0950444
[146]   training's auc: 0.818702    training's binary_logloss: 0.0739142    valid_1's auc: 0.80157  valid_1's binary_logloss: 0.0950473
[147]   training's auc: 0.818768    training's binary_logloss: 0.0739062    valid_1's auc: 0.801601 valid_1's binary_logloss: 0.0950422
[148]   training's auc: 0.818787    training's binary_logloss: 0.073903 valid_1's auc: 0.801642 valid_1's binary_logloss: 0.095036
[149]   training's auc: 0.818727    training's binary_logloss: 0.0738935    valid_1's auc: 0.801648 valid_1's binary_logloss: 0.0950198
[150]   training's auc: 0.818757    training's binary_logloss: 0.0738884    valid_1's auc: 0.80171  valid_1's binary_logloss: 0.0950195
[151]   training's auc: 0.81883 training's binary_logloss: 0.0738825    valid_1's auc: 0.801704 valid_1's binary_logloss: 0.0950253
[152]   training's auc: 0.818895    training's binary_logloss: 0.073878 valid_1's auc: 0.801701 valid_1's binary_logloss: 0.0950301
[153]   training's auc: 0.818909    training's binary_logloss: 0.0738754    valid_1's auc: 0.801637 valid_1's binary_logloss: 0.0950361
[154]   training's auc: 0.818953    training's binary_logloss: 0.0738718    valid_1's auc: 0.801606 valid_1's binary_logloss: 0.0950366
[155]   training's auc: 0.819025    training's binary_logloss: 0.0738668    valid_1's auc: 0.801601 valid_1's binary_logloss: 0.0950417
[156]   training's auc: 0.819068    training's binary_logloss: 0.0738627    valid_1's auc: 0.801585 valid_1's binary_logloss: 0.0950482
[157]   training's auc: 0.819171    training's binary_logloss: 0.0738559    valid_1's auc: 0.801686 valid_1's binary_logloss: 0.0950449
[158]   training's auc: 0.819191    training's binary_logloss: 0.073852 valid_1's auc: 0.801697 valid_1's binary_logloss: 0.09504
[159]   training's auc: 0.819194    training's binary_logloss: 0.073848 valid_1's auc: 0.801609 valid_1's binary_logloss: 0.0950393
[160]   training's auc: 0.819294    training's binary_logloss: 0.073842 valid_1's auc: 0.801457 valid_1's binary_logloss: 0.0950461
[161]   training's auc: 0.819357    training's binary_logloss: 0.0738356    valid_1's auc: 0.801522 valid_1's binary_logloss: 0.0950438
[162]   training's auc: 0.819381    training's binary_logloss: 0.0738313    valid_1's auc: 0.801544 valid_1's binary_logloss: 0.0950454
[163]   training's auc: 0.819433    training's binary_logloss: 0.0738278    valid_1's auc: 0.801594 valid_1's binary_logloss: 0.0950475
[164]   training's auc: 0.819479    training's binary_logloss: 0.0738238    valid_1's auc: 0.801506 valid_1's binary_logloss: 0.0950578
[165]   training's auc: 0.819485    training's binary_logloss: 0.0738228    valid_1's auc: 0.801513 valid_1's binary_logloss: 0.0950475
[166]   training's auc: 0.819515    training's binary_logloss: 0.0738183    valid_1's auc: 0.801514 valid_1's binary_logloss: 0.0950485
[167]   training's auc: 0.819536    training's binary_logloss: 0.0738119    valid_1's auc: 0.801463 valid_1's binary_logloss: 0.0950497
[168]   training's auc: 0.819537    training's binary_logloss: 0.0738086    valid_1's auc: 0.801461 valid_1's binary_logloss: 0.09505
[169]   training's auc: 0.819573    training's binary_logloss: 0.0738042    valid_1's auc: 0.801511 valid_1's binary_logloss: 0.0950516
[170]   training's auc: 0.819603    training's binary_logloss: 0.0738014    valid_1's auc: 0.801525 valid_1's binary_logloss: 0.0950523
[171]   training's auc: 0.819669    training's binary_logloss: 0.0737929    valid_1's auc: 0.80155  valid_1's binary_logloss: 0.0950544
[172]   training's auc: 0.819661    training's binary_logloss: 0.073789 valid_1's auc: 0.801575 valid_1's binary_logloss: 0.0950543
[173]   training's auc: 0.819682    training's binary_logloss: 0.0737863    valid_1's auc: 0.801548 valid_1's binary_logloss: 0.0950514
[174]   training's auc: 0.819686    training's binary_logloss: 0.0737827    valid_1's auc: 0.80149  valid_1's binary_logloss: 0.0950591
[175]   training's auc: 0.819784    training's binary_logloss: 0.0737754    valid_1's auc: 0.801542 valid_1's binary_logloss: 0.0950543
[176]   training's auc: 0.819856    training's binary_logloss: 0.07377  valid_1's auc: 0.801415 valid_1's binary_logloss: 0.0950595
[177]   training's auc: 0.819883    training's binary_logloss: 0.0737671    valid_1's auc: 0.801387 valid_1's binary_logloss: 0.0950631
[178]   training's auc: 0.819954    training's binary_logloss: 0.0737635    valid_1's auc: 0.801343 valid_1's binary_logloss: 0.0950697
[179]   training's auc: 0.819976    training's binary_logloss: 0.0737608    valid_1's auc: 0.801329 valid_1's binary_logloss: 0.0950714
[180]   training's auc: 0.820057    training's binary_logloss: 0.0737549    valid_1's auc: 0.801323 valid_1's binary_logloss: 0.0950687
[181]   training's auc: 0.820072    training's binary_logloss: 0.0737527    valid_1's auc: 0.801345 valid_1's binary_logloss: 0.0950752
[182]   training's auc: 0.820129    training's binary_logloss: 0.0737491    valid_1's auc: 0.801278 valid_1's binary_logloss: 0.0950773
[183]   training's auc: 0.820178    training's binary_logloss: 0.0737455    valid_1's auc: 0.80128  valid_1's binary_logloss: 0.0950661
[184]   training's auc: 0.820237    training's binary_logloss: 0.073741 valid_1's auc: 0.801223 valid_1's binary_logloss: 0.0950696
[185]   training's auc: 0.820263    training's binary_logloss: 0.0737363    valid_1's auc: 0.801297 valid_1's binary_logloss: 0.095063
[186]   training's auc: 0.820275    training's binary_logloss: 0.0737336    valid_1's auc: 0.80128  valid_1's binary_logloss: 0.0950628
[187]   training's auc: 0.820333    training's binary_logloss: 0.0737266    valid_1's auc: 0.801124 valid_1's binary_logloss: 0.0950909
[188]   training's auc: 0.820422    training's binary_logloss: 0.073719 valid_1's auc: 0.801112 valid_1's binary_logloss: 0.0950951
[189]   training's auc: 0.820417    training's binary_logloss: 0.0737164    valid_1's auc: 0.801178 valid_1's binary_logloss: 0.0950911
[190]   training's auc: 0.820463    training's binary_logloss: 0.0737107    valid_1's auc: 0.801299 valid_1's binary_logloss: 0.0950894
[191]   training's auc: 0.820474    training's binary_logloss: 0.0737078    valid_1's auc: 0.801324 valid_1's binary_logloss: 0.0950889
[192]   training's auc: 0.820482    training's binary_logloss: 0.0737056    valid_1's auc: 0.801229 valid_1's binary_logloss: 0.0950925
[193]   training's auc: 0.820573    training's binary_logloss: 0.0736989    valid_1's auc: 0.801189 valid_1's binary_logloss: 0.0951055
[194]   training's auc: 0.820602    training's binary_logloss: 0.0736934    valid_1's auc: 0.800876 valid_1's binary_logloss: 0.0951183
[195]   training's auc: 0.820616    training's binary_logloss: 0.0736868    valid_1's auc: 0.800814 valid_1's binary_logloss: 0.0951133
[196]   training's auc: 0.820633    training's binary_logloss: 0.0736837    valid_1's auc: 0.80082  valid_1's binary_logloss: 0.0951169
[197]   training's auc: 0.820661    training's binary_logloss: 0.0736819    valid_1's auc: 0.800804 valid_1's binary_logloss: 0.0951116
[198]   training's auc: 0.820649    training's binary_logloss: 0.0736789    valid_1's auc: 0.800729 valid_1's binary_logloss: 0.0951241
[199]   training's auc: 0.820663    training's binary_logloss: 0.0736741    valid_1's auc: 0.800617 valid_1's binary_logloss: 0.0951238
[200]   training's auc: 0.820727    training's binary_logloss: 0.073668 valid_1's auc: 0.800694 valid_1's binary_logloss: 0.0951257
[201]   training's auc: 0.820767    training's binary_logloss: 0.0736626    valid_1's auc: 0.800984 valid_1's binary_logloss: 0.0951079
[202]   training's auc: 0.820804    training's binary_logloss: 0.0736584    valid_1's auc: 0.801181 valid_1's binary_logloss: 0.0950977
[203]   training's auc: 0.820833    training's binary_logloss: 0.0736564    valid_1's auc: 0.801223 valid_1's binary_logloss: 0.0950869
[204]   training's auc: 0.820883    training's binary_logloss: 0.0736367    valid_1's auc: 0.801209 valid_1's binary_logloss: 0.0950887
Early stopping, best iteration is:
[104]   training's auc: 0.81646 training's binary_logloss: 0.074193 valid_1's auc: 0.802501 valid_1's binary_logloss: 0.0949953
8
[1] training's auc: 0.659021    training's binary_logloss: 0.0977451    valid_1's auc: 0.667922 valid_1's binary_logloss: 0.0667753
Training until validation scores don't improve for 100 rounds.
[2] training's auc: 0.782929    training's binary_logloss: 0.0962916    valid_1's auc: 0.784101 valid_1's binary_logloss: 0.0659222
[3] training's auc: 0.78618 training's binary_logloss: 0.0956767    valid_1's auc: 0.788715 valid_1's binary_logloss: 0.0655366
[4] training's auc: 0.785892    training's binary_logloss: 0.095076 valid_1's auc: 0.790152 valid_1's binary_logloss: 0.0651196
[5] training's auc: 0.792838    training's binary_logloss: 0.0940868    valid_1's auc: 0.800957 valid_1's binary_logloss: 0.0644159
[6] training's auc: 0.797256    training's binary_logloss: 0.0932581    valid_1's auc: 0.806857 valid_1's binary_logloss: 0.063861
[7] training's auc: 0.796345    training's binary_logloss: 0.0927891    valid_1's auc: 0.80429  valid_1's binary_logloss: 0.0635305
[8] training's auc: 0.797244    training's binary_logloss: 0.0920677    valid_1's auc: 0.804323 valid_1's binary_logloss: 0.0630112
[9] training's auc: 0.799111    training's binary_logloss: 0.0918718    valid_1's auc: 0.807082 valid_1's binary_logloss: 0.0628553
[10]    training's auc: 0.798274    training's binary_logloss: 0.091531 valid_1's auc: 0.805748 valid_1's binary_logloss: 0.0625961
[11]    training's auc: 0.798767    training's binary_logloss: 0.0909135    valid_1's auc: 0.806574 valid_1's binary_logloss: 0.0621389
[12]    training's auc: 0.801005    training's binary_logloss: 0.0903573    valid_1's auc: 0.8077   valid_1's binary_logloss: 0.0616888
[13]    training's auc: 0.800594    training's binary_logloss: 0.0901254    valid_1's auc: 0.807376 valid_1's binary_logloss: 0.0615163
[14]    training's auc: 0.799391    training's binary_logloss: 0.0898873    valid_1's auc: 0.804602 valid_1's binary_logloss: 0.0613258
[15]    training's auc: 0.799973    training's binary_logloss: 0.0894598    valid_1's auc: 0.80734  valid_1's binary_logloss: 0.0609837
[16]    training's auc: 0.800826    training's binary_logloss: 0.0890624    valid_1's auc: 0.807424 valid_1's binary_logloss: 0.0606438
[17]    training's auc: 0.800556    training's binary_logloss: 0.0888349    valid_1's auc: 0.807063 valid_1's binary_logloss: 0.0604495
[18]    training's auc: 0.800736    training's binary_logloss: 0.0885066    valid_1's auc: 0.808055 valid_1's binary_logloss: 0.0601512
[19]    training's auc: 0.801701    training's binary_logloss: 0.0882099    valid_1's auc: 0.809107 valid_1's binary_logloss: 0.0598891
[20]    training's auc: 0.801487    training's binary_logloss: 0.0880168    valid_1's auc: 0.80943  valid_1's binary_logloss: 0.0597102
[21]    training's auc: 0.801994    training's binary_logloss: 0.0877765    valid_1's auc: 0.810201 valid_1's binary_logloss: 0.0594838
[22]    training's auc: 0.802066    training's binary_logloss: 0.0875477    valid_1's auc: 0.810087 valid_1's binary_logloss: 0.0592732
[23]    training's auc: 0.802621    training's binary_logloss: 0.087402 valid_1's auc: 0.809696 valid_1's binary_logloss: 0.0591517
[24]    training's auc: 0.802456    training's binary_logloss: 0.0872056    valid_1's auc: 0.809599 valid_1's binary_logloss: 0.0589684
[25]    training's auc: 0.802714    training's binary_logloss: 0.0870772    valid_1's auc: 0.809573 valid_1's binary_logloss: 0.0588481
[26]    training's auc: 0.803158    training's binary_logloss: 0.0868977    valid_1's auc: 0.809865 valid_1's binary_logloss: 0.0586947
[27]    training's auc: 0.803217    training's binary_logloss: 0.086744 valid_1's auc: 0.809628 valid_1's binary_logloss: 0.0585383
[28]    training's auc: 0.804252    training's binary_logloss: 0.0866573    valid_1's auc: 0.811128 valid_1's binary_logloss: 0.0584608
[29]    training's auc: 0.804553    training's binary_logloss: 0.0865065    valid_1's auc: 0.811729 valid_1's binary_logloss: 0.0583086
[30]    training's auc: 0.804338    training's binary_logloss: 0.0863911    valid_1's auc: 0.811911 valid_1's binary_logloss: 0.0582117
[31]    training's auc: 0.804622    training's binary_logloss: 0.0862567    valid_1's auc: 0.811873 valid_1's binary_logloss: 0.0580706
[32]    training's auc: 0.80478 training's binary_logloss: 0.0861849    valid_1's auc: 0.811709 valid_1's binary_logloss: 0.0580063
[33]    training's auc: 0.805155    training's binary_logloss: 0.0860736    valid_1's auc: 0.811696 valid_1's binary_logloss: 0.0578887
[34]    training's auc: 0.804984    training's binary_logloss: 0.0860161    valid_1's auc: 0.811514 valid_1's binary_logloss: 0.0578275
[35]    training's auc: 0.805   training's binary_logloss: 0.0859207    valid_1's auc: 0.811745 valid_1's binary_logloss: 0.0577335
[36]    training's auc: 0.805193    training's binary_logloss: 0.0858218    valid_1's auc: 0.812049 valid_1's binary_logloss: 0.0576406
[37]    training's auc: 0.805235    training's binary_logloss: 0.0857342    valid_1's auc: 0.812539 valid_1's binary_logloss: 0.0575412
[38]    training's auc: 0.805632    training's binary_logloss: 0.0856421    valid_1's auc: 0.81291  valid_1's binary_logloss: 0.0574406
[39]    training's auc: 0.805991    training's binary_logloss: 0.0855961    valid_1's auc: 0.813657 valid_1's binary_logloss: 0.0574029
[40]    training's auc: 0.806425    training's binary_logloss: 0.0855149    valid_1's auc: 0.814025 valid_1's binary_logloss: 0.0573064
[41]    training's auc: 0.806606    training's binary_logloss: 0.0854352    valid_1's auc: 0.814374 valid_1's binary_logloss: 0.0572174
[42]    training's auc: 0.806573    training's binary_logloss: 0.0853742    valid_1's auc: 0.814086 valid_1's binary_logloss: 0.0571594
[43]    training's auc: 0.806743    training's binary_logloss: 0.0853023    valid_1's auc: 0.814595 valid_1's binary_logloss: 0.0570839
[44]    training's auc: 0.806743    training's binary_logloss: 0.0852461    valid_1's auc: 0.815221 valid_1's binary_logloss: 0.0570194
[45]    training's auc: 0.806815    training's binary_logloss: 0.0851931    valid_1's auc: 0.81454  valid_1's binary_logloss: 0.0569637
[46]    training's auc: 0.807021    training's binary_logloss: 0.085143 valid_1's auc: 0.814218 valid_1's binary_logloss: 0.0569145
[47]    training's auc: 0.806955    training's binary_logloss: 0.0850872    valid_1's auc: 0.814365 valid_1's binary_logloss: 0.0568348
[48]    training's auc: 0.807045    training's binary_logloss: 0.0850439    valid_1's auc: 0.814749 valid_1's binary_logloss: 0.0567858
[49]    training's auc: 0.807163    training's binary_logloss: 0.0850047    valid_1's auc: 0.815054 valid_1's binary_logloss: 0.0567532
[50]    training's auc: 0.807072    training's binary_logloss: 0.0849657    valid_1's auc: 0.81448  valid_1's binary_logloss: 0.0567092
[51]    training's auc: 0.807059    training's binary_logloss: 0.0849375    valid_1's auc: 0.814442 valid_1's binary_logloss: 0.056675
[52]    training's auc: 0.807123    training's binary_logloss: 0.0848976    valid_1's auc: 0.814686 valid_1's binary_logloss: 0.0566136
[53]    training's auc: 0.807273    training's binary_logloss: 0.0848829    valid_1's auc: 0.815037 valid_1's binary_logloss: 0.0566088
[54]    training's auc: 0.807267    training's binary_logloss: 0.0848495    valid_1's auc: 0.814929 valid_1's binary_logloss: 0.0565668
[55]    training's auc: 0.807479    training's binary_logloss: 0.0848183    valid_1's auc: 0.815321 valid_1's binary_logloss: 0.056538
[56]    training's auc: 0.807608    training's binary_logloss: 0.0847882    valid_1's auc: 0.815785 valid_1's binary_logloss: 0.0565081
[57]    training's auc: 0.807705    training's binary_logloss: 0.0847571    valid_1's auc: 0.81586  valid_1's binary_logloss: 0.0564711
[58]    training's auc: 0.807829    training's binary_logloss: 0.0847242    valid_1's auc: 0.816078 valid_1's binary_logloss: 0.0564347
[59]    training's auc: 0.807836    training's binary_logloss: 0.0846966    valid_1's auc: 0.815969 valid_1's binary_logloss: 0.0564051
[60]    training's auc: 0.807932    training's binary_logloss: 0.084677 valid_1's auc: 0.816165 valid_1's binary_logloss: 0.0563777
[61]    training's auc: 0.808128    training's binary_logloss: 0.0846542    valid_1's auc: 0.816272 valid_1's binary_logloss: 0.0563616
[62]    training's auc: 0.808032    training's binary_logloss: 0.084626 valid_1's auc: 0.816418 valid_1's binary_logloss: 0.0563228
[63]    training's auc: 0.808202    training's binary_logloss: 0.0846007    valid_1's auc: 0.816501 valid_1's binary_logloss: 0.0562933
[64]    training's auc: 0.808293    training's binary_logloss: 0.0845614    valid_1's auc: 0.816811 valid_1's binary_logloss: 0.0562547
[65]    training's auc: 0.808388    training's binary_logloss: 0.0845395    valid_1's auc: 0.81681  valid_1's binary_logloss: 0.0562309
[66]    training's auc: 0.808498    training's binary_logloss: 0.0845224    valid_1's auc: 0.81661  valid_1's binary_logloss: 0.0562206
[67]    training's auc: 0.808623    training's binary_logloss: 0.0845026    valid_1's auc: 0.816483 valid_1's binary_logloss: 0.0561998
[68]    training's auc: 0.808581    training's binary_logloss: 0.0844806    valid_1's auc: 0.816755 valid_1's binary_logloss: 0.0561771
[69]    training's auc: 0.808788    training's binary_logloss: 0.0844683    valid_1's auc: 0.81722  valid_1's binary_logloss: 0.0561672
[70]    training's auc: 0.808805    training's binary_logloss: 0.0844468    valid_1's auc: 0.817082 valid_1's binary_logloss: 0.0561362
[71]    training's auc: 0.809022    training's binary_logloss: 0.084432 valid_1's auc: 0.817074 valid_1's binary_logloss: 0.0561312
[72]    training's auc: 0.809063    training's binary_logloss: 0.0844117    valid_1's auc: 0.816864 valid_1's binary_logloss: 0.0561104
[73]    training's auc: 0.809107    training's binary_logloss: 0.0843966    valid_1's auc: 0.816877 valid_1's binary_logloss: 0.0560993
[74]    training's auc: 0.809203    training's binary_logloss: 0.0843875    valid_1's auc: 0.816657 valid_1's binary_logloss: 0.0560922
[75]    training's auc: 0.809259    training's binary_logloss: 0.0843759    valid_1's auc: 0.816402 valid_1's binary_logloss: 0.056085
[76]    training's auc: 0.809361    training's binary_logloss: 0.0843586    valid_1's auc: 0.816517 valid_1's binary_logloss: 0.0560751
[77]    training's auc: 0.809421    training's binary_logloss: 0.084339 valid_1's auc: 0.816594 valid_1's binary_logloss: 0.0560546
[78]    training's auc: 0.809535    training's binary_logloss: 0.0843318    valid_1's auc: 0.816616 valid_1's binary_logloss: 0.0560484
[79]    training's auc: 0.80968 training's binary_logloss: 0.0843194    valid_1's auc: 0.816911 valid_1's binary_logloss: 0.0560291
[80]    training's auc: 0.8098  training's binary_logloss: 0.0843084    valid_1's auc: 0.816888 valid_1's binary_logloss: 0.0560224
[81]    training's auc: 0.809876    training's binary_logloss: 0.0842951    valid_1's auc: 0.816484 valid_1's binary_logloss: 0.0560072
[82]    training's auc: 0.809964    training's binary_logloss: 0.0842853    valid_1's auc: 0.816303 valid_1's binary_logloss: 0.0560108
[83]    training's auc: 0.810191    training's binary_logloss: 0.0842674    valid_1's auc: 0.816584 valid_1's binary_logloss: 0.0559851
[84]    training's auc: 0.810264    training's binary_logloss: 0.0842563    valid_1's auc: 0.816697 valid_1's binary_logloss: 0.0559769
[85]    training's auc: 0.810346    training's binary_logloss: 0.0842448    valid_1's auc: 0.816915 valid_1's binary_logloss: 0.0559724
[86]    training's auc: 0.81037 training's binary_logloss: 0.084233 valid_1's auc: 0.816981 valid_1's binary_logloss: 0.0559618
[87]    training's auc: 0.810337    training's binary_logloss: 0.0842191    valid_1's auc: 0.816701 valid_1's binary_logloss: 0.0559556
[88]    training's auc: 0.81048 training's binary_logloss: 0.0842017    valid_1's auc: 0.816641 valid_1's binary_logloss: 0.0559454
[89]    training's auc: 0.810608    training's binary_logloss: 0.0841822    valid_1's auc: 0.816591 valid_1's binary_logloss: 0.0559269
[90]    training's auc: 0.810655    training's binary_logloss: 0.0841739    valid_1's auc: 0.816726 valid_1's binary_logloss: 0.0559173
[91]    training's auc: 0.810719    training's binary_logloss: 0.0841643    valid_1's auc: 0.816838 valid_1's binary_logloss: 0.0559096
[92]    training's auc: 0.810818    training's binary_logloss: 0.084155 valid_1's auc: 0.817027 valid_1's binary_logloss: 0.0558909
[93]    training's auc: 0.810814    training's binary_logloss: 0.0841443    valid_1's auc: 0.81709  valid_1's binary_logloss: 0.0558779
[94]    training's auc: 0.810849    training's binary_logloss: 0.0841328    valid_1's auc: 0.816985 valid_1's binary_logloss: 0.0558654
[95]    training's auc: 0.810878    training's binary_logloss: 0.0841239    valid_1's auc: 0.817107 valid_1's binary_logloss: 0.0558463
[96]    training's auc: 0.810903    training's binary_logloss: 0.0841197    valid_1's auc: 0.816735 valid_1's binary_logloss: 0.0558617
[97]    training's auc: 0.810934    training's binary_logloss: 0.0841127    valid_1's auc: 0.816641 valid_1's binary_logloss: 0.0558565
[98]    training's auc: 0.810958    training's binary_logloss: 0.0841034    valid_1's auc: 0.816856 valid_1's binary_logloss: 0.0558393
[99]    training's auc: 0.811096    training's binary_logloss: 0.0840931    valid_1's auc: 0.816683 valid_1's binary_logloss: 0.0558354
[100]   training's auc: 0.811127    training's binary_logloss: 0.0840849    valid_1's auc: 0.816753 valid_1's binary_logloss: 0.0558261
[101]   training's auc: 0.811209    training's binary_logloss: 0.084073 valid_1's auc: 0.81679  valid_1's binary_logloss: 0.0558183
[102]   training's auc: 0.811326    training's binary_logloss: 0.0840615    valid_1's auc: 0.816839 valid_1's binary_logloss: 0.0558055
[103]   training's auc: 0.811331    training's binary_logloss: 0.084058 valid_1's auc: 0.816707 valid_1's binary_logloss: 0.055805
[104]   training's auc: 0.811495    training's binary_logloss: 0.0840347    valid_1's auc: 0.816186 valid_1's binary_logloss: 0.0558033
[105]   training's auc: 0.811504    training's binary_logloss: 0.0840285    valid_1's auc: 0.815966 valid_1's binary_logloss: 0.0558024
[106]   training's auc: 0.811606    training's binary_logloss: 0.0840172    valid_1's auc: 0.815874 valid_1's binary_logloss: 0.0557894
[107]   training's auc: 0.811649    training's binary_logloss: 0.0840077    valid_1's auc: 0.816063 valid_1's binary_logloss: 0.0557843
[108]   training's auc: 0.811723    training's binary_logloss: 0.0840034    valid_1's auc: 0.815978 valid_1's binary_logloss: 0.0557805
[109]   training's auc: 0.811758    training's binary_logloss: 0.0839971    valid_1's auc: 0.816078 valid_1's binary_logloss: 0.0557829
[110]   training's auc: 0.811774    training's binary_logloss: 0.0839932    valid_1's auc: 0.816043 valid_1's binary_logloss: 0.0557805
[111]   training's auc: 0.811827    training's binary_logloss: 0.0839894    valid_1's auc: 0.816089 valid_1's binary_logloss: 0.0557754
[112]   training's auc: 0.811903    training's binary_logloss: 0.0839828    valid_1's auc: 0.816169 valid_1's binary_logloss: 0.0557696
[113]   training's auc: 0.811885    training's binary_logloss: 0.0839783    valid_1's auc: 0.81611  valid_1's binary_logloss: 0.055759
[114]   training's auc: 0.812008    training's binary_logloss: 0.0839713    valid_1's auc: 0.816059 valid_1's binary_logloss: 0.0557656
[115]   training's auc: 0.812011    training's binary_logloss: 0.0839668    valid_1's auc: 0.81613  valid_1's binary_logloss: 0.0557569
[116]   training's auc: 0.812099    training's binary_logloss: 0.0839602    valid_1's auc: 0.816182 valid_1's binary_logloss: 0.0557503
[117]   training's auc: 0.812161    training's binary_logloss: 0.0839484    valid_1's auc: 0.816163 valid_1's binary_logloss: 0.0557415
[118]   training's auc: 0.812206    training's binary_logloss: 0.083943 valid_1's auc: 0.81609  valid_1's binary_logloss: 0.055739
[119]   training's auc: 0.812265    training's binary_logloss: 0.0839348    valid_1's auc: 0.816047 valid_1's binary_logloss: 0.0557407
[120]   training's auc: 0.812401    training's binary_logloss: 0.0839245    valid_1's auc: 0.816042 valid_1's binary_logloss: 0.0557514
[121]   training's auc: 0.812466    training's binary_logloss: 0.0839183    valid_1's auc: 0.816251 valid_1's binary_logloss: 0.0557414
[122]   training's auc: 0.812562    training's binary_logloss: 0.0839112    valid_1's auc: 0.816465 valid_1's binary_logloss: 0.055734
[123]   training's auc: 0.812579    training's binary_logloss: 0.0839054    valid_1's auc: 0.816412 valid_1's binary_logloss: 0.0557337
[124]   training's auc: 0.812694    training's binary_logloss: 0.0838958    valid_1's auc: 0.816396 valid_1's binary_logloss: 0.0557388
[125]   training's auc: 0.812792    training's binary_logloss: 0.0838834    valid_1's auc: 0.816347 valid_1's binary_logloss: 0.055728
[126]   training's auc: 0.812853    training's binary_logloss: 0.0838757    valid_1's auc: 0.816155 valid_1's binary_logloss: 0.0557399
[127]   training's auc: 0.812859    training's binary_logloss: 0.0838717    valid_1's auc: 0.816179 valid_1's binary_logloss: 0.0557382
[128]   training's auc: 0.81286 training's binary_logloss: 0.0838678    valid_1's auc: 0.816188 valid_1's binary_logloss: 0.0557338
[129]   training's auc: 0.812902    training's binary_logloss: 0.083861 valid_1's auc: 0.816339 valid_1's binary_logloss: 0.0557306
[130]   training's auc: 0.812934    training's binary_logloss: 0.0838547    valid_1's auc: 0.816338 valid_1's binary_logloss: 0.0557402
[131]   training's auc: 0.812968    training's binary_logloss: 0.0838511    valid_1's auc: 0.816398 valid_1's binary_logloss: 0.0557361
[132]   training's auc: 0.812989    training's binary_logloss: 0.0838471    valid_1's auc: 0.816365 valid_1's binary_logloss: 0.0557305
[133]   training's auc: 0.813074    training's binary_logloss: 0.0838381    valid_1's auc: 0.816402 valid_1's binary_logloss: 0.055732
[134]   training's auc: 0.813081    training's binary_logloss: 0.083832 valid_1's auc: 0.816407 valid_1's binary_logloss: 0.0557204
[135]   training's auc: 0.813148    training's binary_logloss: 0.0838236    valid_1's auc: 0.816402 valid_1's binary_logloss: 0.0557181
[136]   training's auc: 0.813278    training's binary_logloss: 0.0838136    valid_1's auc: 0.816311 valid_1's binary_logloss: 0.0557265
[137]   training's auc: 0.813272    training's binary_logloss: 0.0838089    valid_1's auc: 0.816138 valid_1's binary_logloss: 0.0557348
[138]   training's auc: 0.813294    training's binary_logloss: 0.0838043    valid_1's auc: 0.816023 valid_1's binary_logloss: 0.0557341
[139]   training's auc: 0.813322    training's binary_logloss: 0.083801 valid_1's auc: 0.815985 valid_1's binary_logloss: 0.0557357
[140]   training's auc: 0.813366    training's binary_logloss: 0.0837942    valid_1's auc: 0.815988 valid_1's binary_logloss: 0.0557344
[141]   training's auc: 0.813386    training's binary_logloss: 0.0837917    valid_1's auc: 0.816145 valid_1's binary_logloss: 0.0557285
[142]   training's auc: 0.81343 training's binary_logloss: 0.0837881    valid_1's auc: 0.816108 valid_1's binary_logloss: 0.0557306
[143]   training's auc: 0.813439    training's binary_logloss: 0.0837797    valid_1's auc: 0.816183 valid_1's binary_logloss: 0.0557132
[144]   training's auc: 0.813438    training's binary_logloss: 0.0837708    valid_1's auc: 0.815809 valid_1's binary_logloss: 0.0557276
[145]   training's auc: 0.813518    training's binary_logloss: 0.0837577    valid_1's auc: 0.815753 valid_1's binary_logloss: 0.055723
[146]   training's auc: 0.813544    training's binary_logloss: 0.0837528    valid_1's auc: 0.81582  valid_1's binary_logloss: 0.0557235
[147]   training's auc: 0.813547    training's binary_logloss: 0.0837494    valid_1's auc: 0.815739 valid_1's binary_logloss: 0.0557281
[148]   training's auc: 0.813601    training's binary_logloss: 0.0837412    valid_1's auc: 0.815785 valid_1's binary_logloss: 0.0557239
[149]   training's auc: 0.813631    training's binary_logloss: 0.0837374    valid_1's auc: 0.815803 valid_1's binary_logloss: 0.0557217
[150]   training's auc: 0.813702    training's binary_logloss: 0.0837295    valid_1's auc: 0.815734 valid_1's binary_logloss: 0.0557281
[151]   training's auc: 0.81378 training's binary_logloss: 0.0837239    valid_1's auc: 0.8157   valid_1's binary_logloss: 0.0557294
[152]   training's auc: 0.813797    training's binary_logloss: 0.0837201    valid_1's auc: 0.815536 valid_1's binary_logloss: 0.0557362
[153]   training's auc: 0.813909    training's binary_logloss: 0.0837115    valid_1's auc: 0.815413 valid_1's binary_logloss: 0.0557369
[154]   training's auc: 0.814048    training's binary_logloss: 0.083701 valid_1's auc: 0.815291 valid_1's binary_logloss: 0.0557463
[155]   training's auc: 0.814129    training's binary_logloss: 0.0836938    valid_1's auc: 0.815297 valid_1's binary_logloss: 0.0557441
[156]   training's auc: 0.81416 training's binary_logloss: 0.0836791    valid_1's auc: 0.81537  valid_1's binary_logloss: 0.0557294
[157]   training's auc: 0.814207    training's binary_logloss: 0.0836744    valid_1's auc: 0.815386 valid_1's binary_logloss: 0.0557321
[158]   training's auc: 0.814229    training's binary_logloss: 0.0836614    valid_1's auc: 0.815496 valid_1's binary_logloss: 0.0557198
[159]   training's auc: 0.814273    training's binary_logloss: 0.0836503    valid_1's auc: 0.815553 valid_1's binary_logloss: 0.0557219
[160]   training's auc: 0.814337    training's binary_logloss: 0.0836455    valid_1's auc: 0.815676 valid_1's binary_logloss: 0.0557211
[161]   training's auc: 0.814365    training's binary_logloss: 0.083643 valid_1's auc: 0.815727 valid_1's binary_logloss: 0.0557114
[162]   training's auc: 0.814391    training's binary_logloss: 0.0836374    valid_1's auc: 0.81575  valid_1's binary_logloss: 0.0557144
[163]   training's auc: 0.814438    training's binary_logloss: 0.0836316    valid_1's auc: 0.815773 valid_1's binary_logloss: 0.0557121
[164]   training's auc: 0.814479    training's binary_logloss: 0.0836259    valid_1's auc: 0.815514 valid_1's binary_logloss: 0.0557137
[165]   training's auc: 0.814536    training's binary_logloss: 0.0836069    valid_1's auc: 0.815567 valid_1's binary_logloss: 0.0557065
[166]   training's auc: 0.81458 training's binary_logloss: 0.0836019    valid_1's auc: 0.81561  valid_1's binary_logloss: 0.0557044
[167]   training's auc: 0.814629    training's binary_logloss: 0.0835964    valid_1's auc: 0.815543 valid_1's binary_logloss: 0.0557057
[168]   training's auc: 0.814677    training's binary_logloss: 0.0835905    valid_1's auc: 0.815461 valid_1's binary_logloss: 0.05571
[169]   training's auc: 0.814701    training's binary_logloss: 0.0835872    valid_1's auc: 0.815485 valid_1's binary_logloss: 0.0557125
Early stopping, best iteration is:
[69]    training's auc: 0.808788    training's binary_logloss: 0.0844683    valid_1's auc: 0.81722  valid_1's binary_logloss: 0.0561672
8
[1] training's auc: 0.651876    training's binary_logloss: 0.0966336    valid_1's auc: 0.672491 valid_1's binary_logloss: 0.0712814
Training until validation scores don't improve for 100 rounds.
[2] training's auc: 0.782222    training's binary_logloss: 0.0952334    valid_1's auc: 0.796897 valid_1's binary_logloss: 0.0703147
[3] training's auc: 0.782296    training's binary_logloss: 0.0946264    valid_1's auc: 0.797943 valid_1's binary_logloss: 0.0698724
[4] training's auc: 0.78281 training's binary_logloss: 0.0940255    valid_1's auc: 0.796878 valid_1's binary_logloss: 0.0693916
[5] training's auc: 0.790877    training's binary_logloss: 0.0930066    valid_1's auc: 0.802175 valid_1's binary_logloss: 0.0687081
[6] training's auc: 0.795996    training's binary_logloss: 0.0920978    valid_1's auc: 0.812757 valid_1's binary_logloss: 0.068065
[7] training's auc: 0.794912    training's binary_logloss: 0.0916311    valid_1's auc: 0.810214 valid_1's binary_logloss: 0.0676846
[8] training's auc: 0.795266    training's binary_logloss: 0.0909168    valid_1's auc: 0.808938 valid_1's binary_logloss: 0.0671766
[9] training's auc: 0.798298    training's binary_logloss: 0.0907412    valid_1's auc: 0.815323 valid_1's binary_logloss: 0.0669682
[10]    training's auc: 0.797394    training's binary_logloss: 0.0904158    valid_1's auc: 0.814292 valid_1's binary_logloss: 0.0666602
[11]    training's auc: 0.797961    training's binary_logloss: 0.0897895    valid_1's auc: 0.813742 valid_1's binary_logloss: 0.0662159
[12]    training's auc: 0.799646    training's binary_logloss: 0.089227 valid_1's auc: 0.817797 valid_1's binary_logloss: 0.065767
[13]    training's auc: 0.798912    training's binary_logloss: 0.0889946    valid_1's auc: 0.817083 valid_1's binary_logloss: 0.0655623
[14]    training's auc: 0.798524    training's binary_logloss: 0.0887722    valid_1's auc: 0.816027 valid_1's binary_logloss: 0.065345
[15]    training's auc: 0.799311    training's binary_logloss: 0.0883419    valid_1's auc: 0.816868 valid_1's binary_logloss: 0.0650306
[16]    training's auc: 0.800168    training's binary_logloss: 0.0879358    valid_1's auc: 0.818169 valid_1's binary_logloss: 0.064701
[17]    training's auc: 0.800163    training's binary_logloss: 0.0877144    valid_1's auc: 0.817945 valid_1's binary_logloss: 0.0644967
[18]    training's auc: 0.800814    training's binary_logloss: 0.0873888    valid_1's auc: 0.819244 valid_1's binary_logloss: 0.0642213
[19]    training's auc: 0.800638    training's binary_logloss: 0.08709  valid_1's auc: 0.819727 valid_1's binary_logloss: 0.0639646
[20]    training's auc: 0.800659    training's binary_logloss: 0.0869065    valid_1's auc: 0.819501 valid_1's binary_logloss: 0.0637845
[21]    training's auc: 0.801281    training's binary_logloss: 0.0866696    valid_1's auc: 0.819769 valid_1's binary_logloss: 0.0635872
[22]    training's auc: 0.801517    training's binary_logloss: 0.0864386    valid_1's auc: 0.820189 valid_1's binary_logloss: 0.0633721
[23]    training's auc: 0.802045    training's binary_logloss: 0.0862893    valid_1's auc: 0.820194 valid_1's binary_logloss: 0.0632338
[24]    training's auc: 0.802062    training's binary_logloss: 0.0860903    valid_1's auc: 0.820576 valid_1's binary_logloss: 0.0630546
[25]    training's auc: 0.802013    training's binary_logloss: 0.0859674    valid_1's auc: 0.82046  valid_1's binary_logloss: 0.0629284
[26]    training's auc: 0.802355    training's binary_logloss: 0.0858004    valid_1's auc: 0.821287 valid_1's binary_logloss: 0.0627763
[27]    training's auc: 0.802812    training's binary_logloss: 0.0856565    valid_1's auc: 0.821568 valid_1's binary_logloss: 0.0626215
[28]    training's auc: 0.803651    training's binary_logloss: 0.0855865    valid_1's auc: 0.820862 valid_1's binary_logloss: 0.0625611
[29]    training's auc: 0.803934    training's binary_logloss: 0.0854361    valid_1's auc: 0.821599 valid_1's binary_logloss: 0.0624167
[30]    training's auc: 0.804023    training's binary_logloss: 0.0853239    valid_1's auc: 0.821448 valid_1's binary_logloss: 0.0623072
[31]    training's auc: 0.804164    training's binary_logloss: 0.0851905    valid_1's auc: 0.822445 valid_1's binary_logloss: 0.0621656
[32]    training's auc: 0.804365    training's binary_logloss: 0.0851225    valid_1's auc: 0.822411 valid_1's binary_logloss: 0.0620876
[33]    training's auc: 0.804282    training's binary_logloss: 0.0850101    valid_1's auc: 0.822785 valid_1's binary_logloss: 0.0619808
[34]    training's auc: 0.804337    training's binary_logloss: 0.0849559    valid_1's auc: 0.823679 valid_1's binary_logloss: 0.0619049
[35]    training's auc: 0.804402    training's binary_logloss: 0.0848642    valid_1's auc: 0.824117 valid_1's binary_logloss: 0.0618065
[36]    training's auc: 0.804437    training's binary_logloss: 0.0847682    valid_1's auc: 0.82468  valid_1's binary_logloss: 0.0616965
[37]    training's auc: 0.804705    training's binary_logloss: 0.0846795    valid_1's auc: 0.824886 valid_1's binary_logloss: 0.0616042
[38]    training's auc: 0.804914    training's binary_logloss: 0.0845808    valid_1's auc: 0.824717 valid_1's binary_logloss: 0.0615223
[39]    training's auc: 0.80542 training's binary_logloss: 0.0845329    valid_1's auc: 0.824429 valid_1's binary_logloss: 0.0614744
[40]    training's auc: 0.80561 training's binary_logloss: 0.0844504    valid_1's auc: 0.823931 valid_1's binary_logloss: 0.0613888
[41]    training's auc: 0.805765    training's binary_logloss: 0.0843685    valid_1's auc: 0.823855 valid_1's binary_logloss: 0.0613149
[42]    training's auc: 0.805723    training's binary_logloss: 0.084313 valid_1's auc: 0.824308 valid_1's binary_logloss: 0.0612435
[43]    training's auc: 0.805933    training's binary_logloss: 0.0842366    valid_1's auc: 0.824345 valid_1's binary_logloss: 0.0611773
[44]    training's auc: 0.805917    training's binary_logloss: 0.0841822    valid_1's auc: 0.82491  valid_1's binary_logloss: 0.0611007
[45]    training's auc: 0.806125    training's binary_logloss: 0.0841292    valid_1's auc: 0.82475  valid_1's binary_logloss: 0.0610397
[46]    training's auc: 0.806393    training's binary_logloss: 0.0840789    valid_1's auc: 0.824461 valid_1's binary_logloss: 0.0609927
[47]    training's auc: 0.806412    training's binary_logloss: 0.0840263    valid_1's auc: 0.824824 valid_1's binary_logloss: 0.0609207
[48]    training's auc: 0.80666 training's binary_logloss: 0.0839782    valid_1's auc: 0.825245 valid_1's binary_logloss: 0.0608693
[49]    training's auc: 0.806707    training's binary_logloss: 0.0839407    valid_1's auc: 0.825594 valid_1's binary_logloss: 0.0608292
[50]    training's auc: 0.80672 training's binary_logloss: 0.0839073    valid_1's auc: 0.825844 valid_1's binary_logloss: 0.0607807
[51]    training's auc: 0.806756    training's binary_logloss: 0.0838608    valid_1's auc: 0.825818 valid_1's binary_logloss: 0.060727
[52]    training's auc: 0.806669    training's binary_logloss: 0.0838224    valid_1's auc: 0.82552  valid_1's binary_logloss: 0.06067
[53]    training's auc: 0.806651    training's binary_logloss: 0.0838034    valid_1's auc: 0.825846 valid_1's binary_logloss: 0.060648
[54]    training's auc: 0.806617    training's binary_logloss: 0.0837733    valid_1's auc: 0.825788 valid_1's binary_logloss: 0.0606121
[55]    training's auc: 0.806792    training's binary_logloss: 0.0837419    valid_1's auc: 0.825677 valid_1's binary_logloss: 0.0605781
[56]    training's auc: 0.807028    training's binary_logloss: 0.0837113    valid_1's auc: 0.825301 valid_1's binary_logloss: 0.0605593
[57]    training's auc: 0.807191    training's binary_logloss: 0.0836711    valid_1's auc: 0.825041 valid_1's binary_logloss: 0.0605329
[58]    training's auc: 0.807456    training's binary_logloss: 0.0836309    valid_1's auc: 0.824855 valid_1's binary_logloss: 0.0605086
[59]    training's auc: 0.807649    training's binary_logloss: 0.0836081    valid_1's auc: 0.824865 valid_1's binary_logloss: 0.0604993
[60]    training's auc: 0.807782    training's binary_logloss: 0.083587 valid_1's auc: 0.824816 valid_1's binary_logloss: 0.0604744
[61]    training's auc: 0.807984    training's binary_logloss: 0.0835607    valid_1's auc: 0.824744 valid_1's binary_logloss: 0.0604517
[62]    training's auc: 0.807988    training's binary_logloss: 0.0835311    valid_1's auc: 0.824757 valid_1's binary_logloss: 0.0604051
[63]    training's auc: 0.808153    training's binary_logloss: 0.0835002    valid_1's auc: 0.824989 valid_1's binary_logloss: 0.0603817
[64]    training's auc: 0.808191    training's binary_logloss: 0.0834812    valid_1's auc: 0.824957 valid_1's binary_logloss: 0.0603725
[65]    training's auc: 0.808286    training's binary_logloss: 0.0834613    valid_1's auc: 0.824772 valid_1's binary_logloss: 0.0603514
[66]    training's auc: 0.808291    training's binary_logloss: 0.0834439    valid_1's auc: 0.824602 valid_1's binary_logloss: 0.0603396
[67]    training's auc: 0.808445    training's binary_logloss: 0.0834233    valid_1's auc: 0.824643 valid_1's binary_logloss: 0.0603051
[68]    training's auc: 0.808687    training's binary_logloss: 0.0833945    valid_1's auc: 0.824499 valid_1's binary_logloss: 0.0603005
[69]    training's auc: 0.808858    training's binary_logloss: 0.0833786    valid_1's auc: 0.824734 valid_1's binary_logloss: 0.0602759
[70]    training's auc: 0.808975    training's binary_logloss: 0.0833647    valid_1's auc: 0.824802 valid_1's binary_logloss: 0.0602611
[71]    training's auc: 0.809138    training's binary_logloss: 0.0833506    valid_1's auc: 0.824615 valid_1's binary_logloss: 0.0602555
[72]    training's auc: 0.80915 training's binary_logloss: 0.0833292    valid_1's auc: 0.824555 valid_1's binary_logloss: 0.0602288
[73]    training's auc: 0.809218    training's binary_logloss: 0.083313 valid_1's auc: 0.824424 valid_1's binary_logloss: 0.0602239
[74]    training's auc: 0.809292    training's binary_logloss: 0.0833036    valid_1's auc: 0.824408 valid_1's binary_logloss: 0.0602215
[75]    training's auc: 0.809399    training's binary_logloss: 0.083288 valid_1's auc: 0.824159 valid_1's binary_logloss: 0.0602156
[76]    training's auc: 0.809528    training's binary_logloss: 0.0832685    valid_1's auc: 0.824594 valid_1's binary_logloss: 0.0601978
[77]    training's auc: 0.809609    training's binary_logloss: 0.0832551    valid_1's auc: 0.824379 valid_1's binary_logloss: 0.0601937
[78]    training's auc: 0.809667    training's binary_logloss: 0.0832479    valid_1's auc: 0.824251 valid_1's binary_logloss: 0.0602006
[79]    training's auc: 0.809771    training's binary_logloss: 0.0832364    valid_1's auc: 0.82418  valid_1's binary_logloss: 0.0601822
[80]    training's auc: 0.809818    training's binary_logloss: 0.0832251    valid_1's auc: 0.823997 valid_1's binary_logloss: 0.0601747
[81]    training's auc: 0.809845    training's binary_logloss: 0.0831997    valid_1's auc: 0.823921 valid_1's binary_logloss: 0.0601597
[82]    training's auc: 0.809981    training's binary_logloss: 0.0831874    valid_1's auc: 0.823746 valid_1's binary_logloss: 0.0601594
[83]    training's auc: 0.810113    training's binary_logloss: 0.0831691    valid_1's auc: 0.823861 valid_1's binary_logloss: 0.0601302
[84]    training's auc: 0.810134    training's binary_logloss: 0.08316  valid_1's auc: 0.823783 valid_1's binary_logloss: 0.0601252
[85]    training's auc: 0.810189    training's binary_logloss: 0.0831473    valid_1's auc: 0.823775 valid_1's binary_logloss: 0.0601218
[86]    training's auc: 0.810156    training's binary_logloss: 0.0831307    valid_1's auc: 0.82359  valid_1's binary_logloss: 0.0601234
[87]    training's auc: 0.810128    training's binary_logloss: 0.0831154    valid_1's auc: 0.823591 valid_1's binary_logloss: 0.0601117
[88]    training's auc: 0.810232    training's binary_logloss: 0.083101 valid_1's auc: 0.823949 valid_1's binary_logloss: 0.0600746
[89]    training's auc: 0.810245    training's binary_logloss: 0.0830823    valid_1's auc: 0.823989 valid_1's binary_logloss: 0.0600623
[90]    training's auc: 0.810291    training's binary_logloss: 0.0830743    valid_1's auc: 0.823931 valid_1's binary_logloss: 0.0600605
[91]    training's auc: 0.810381    training's binary_logloss: 0.0830613    valid_1's auc: 0.823988 valid_1's binary_logloss: 0.060055
[92]    training's auc: 0.810537    training's binary_logloss: 0.0830497    valid_1's auc: 0.823938 valid_1's binary_logloss: 0.0600608
[93]    training's auc: 0.810596    training's binary_logloss: 0.0830327    valid_1's auc: 0.823778 valid_1's binary_logloss: 0.0600516
[94]    training's auc: 0.810681    training's binary_logloss: 0.0830257    valid_1's auc: 0.823809 valid_1's binary_logloss: 0.0600451
[95]    training's auc: 0.810708    training's binary_logloss: 0.0830142    valid_1's auc: 0.82377  valid_1's binary_logloss: 0.0600337
[96]    training's auc: 0.81068 training's binary_logloss: 0.083004 valid_1's auc: 0.823705 valid_1's binary_logloss: 0.0600304
[97]    training's auc: 0.810811    training's binary_logloss: 0.0829835    valid_1's auc: 0.823786 valid_1's binary_logloss: 0.0600237
[98]    training's auc: 0.810819    training's binary_logloss: 0.0829762    valid_1's auc: 0.823652 valid_1's binary_logloss: 0.0600215
[99]    training's auc: 0.810861    training's binary_logloss: 0.0829698    valid_1's auc: 0.823793 valid_1's binary_logloss: 0.0600055
[100]   training's auc: 0.810882    training's binary_logloss: 0.0829642    valid_1's auc: 0.823639 valid_1's binary_logloss: 0.0600004
[101]   training's auc: 0.810986    training's binary_logloss: 0.0829498    valid_1's auc: 0.823674 valid_1's binary_logloss: 0.0599905
[102]   training's auc: 0.811097    training's binary_logloss: 0.0829382    valid_1's auc: 0.823761 valid_1's binary_logloss: 0.0599795
[103]   training's auc: 0.811146    training's binary_logloss: 0.0829249    valid_1's auc: 0.823688 valid_1's binary_logloss: 0.0599688
[104]   training's auc: 0.81129 training's binary_logloss: 0.0829014    valid_1's auc: 0.823731 valid_1's binary_logloss: 0.059965
[105]   training's auc: 0.81128 training's binary_logloss: 0.0828934    valid_1's auc: 0.823728 valid_1's binary_logloss: 0.0599626
[106]   training's auc: 0.811379    training's binary_logloss: 0.0828801    valid_1's auc: 0.823626 valid_1's binary_logloss: 0.0599544
[107]   training's auc: 0.811452    training's binary_logloss: 0.08287  valid_1's auc: 0.82364  valid_1's binary_logloss: 0.0599516
[108]   training's auc: 0.811465    training's binary_logloss: 0.0828583    valid_1's auc: 0.823632 valid_1's binary_logloss: 0.0599532
[109]   training's auc: 0.811584    training's binary_logloss: 0.0828442    valid_1's auc: 0.823672 valid_1's binary_logloss: 0.0599447
[110]   training's auc: 0.811635    training's binary_logloss: 0.0828381    valid_1's auc: 0.82368  valid_1's binary_logloss: 0.059941
[111]   training's auc: 0.811691    training's binary_logloss: 0.0828319    valid_1's auc: 0.823625 valid_1's binary_logloss: 0.0599361
[112]   training's auc: 0.811854    training's binary_logloss: 0.0828229    valid_1's auc: 0.823589 valid_1's binary_logloss: 0.0599318
[113]   training's auc: 0.811929    training's binary_logloss: 0.0828131    valid_1's auc: 0.823237 valid_1's binary_logloss: 0.0599332
[114]   training's auc: 0.81202 training's binary_logloss: 0.0828053    valid_1's auc: 0.822993 valid_1's binary_logloss: 0.0599489
[115]   training's auc: 0.812045    training's binary_logloss: 0.0827987    valid_1's auc: 0.823006 valid_1's binary_logloss: 0.0599442
[116]   training's auc: 0.812097    training's binary_logloss: 0.0827935    valid_1's auc: 0.823076 valid_1's binary_logloss: 0.0599337
[117]   training's auc: 0.812219    training's binary_logloss: 0.0827841    valid_1's auc: 0.823066 valid_1's binary_logloss: 0.0599397
[118]   training's auc: 0.812252    training's binary_logloss: 0.0827784    valid_1's auc: 0.82313  valid_1's binary_logloss: 0.0599415
[119]   training's auc: 0.812329    training's binary_logloss: 0.0827701    valid_1's auc: 0.823238 valid_1's binary_logloss: 0.0599377
[120]   training's auc: 0.812437    training's binary_logloss: 0.0827596    valid_1's auc: 0.823315 valid_1's binary_logloss: 0.0599267
[121]   training's auc: 0.812574    training's binary_logloss: 0.0827483    valid_1's auc: 0.823122 valid_1's binary_logloss: 0.05994
[122]   training's auc: 0.812613    training's binary_logloss: 0.0827436    valid_1's auc: 0.822983 valid_1's binary_logloss: 0.0599483
[123]   training's auc: 0.812644    training's binary_logloss: 0.0827392    valid_1's auc: 0.822837 valid_1's binary_logloss: 0.0599535
[124]   training's auc: 0.812722    training's binary_logloss: 0.0827306    valid_1's auc: 0.82294  valid_1's binary_logloss: 0.059952
[125]   training's auc: 0.812771    training's binary_logloss: 0.0827202    valid_1's auc: 0.822776 valid_1's binary_logloss: 0.059963
[126]   training's auc: 0.812805    training's binary_logloss: 0.0827155    valid_1's auc: 0.822784 valid_1's binary_logloss: 0.0599671
[127]   training's auc: 0.812825    training's binary_logloss: 0.082712 valid_1's auc: 0.822783 valid_1's binary_logloss: 0.0599627
[128]   training's auc: 0.812853    training's binary_logloss: 0.0827075    valid_1's auc: 0.822657 valid_1's binary_logloss: 0.0599685
[129]   training's auc: 0.812864    training's binary_logloss: 0.0827033    valid_1's auc: 0.822662 valid_1's binary_logloss: 0.0599673
[130]   training's auc: 0.812904    training's binary_logloss: 0.0826986    valid_1's auc: 0.82258  valid_1's binary_logloss: 0.0599632
[131]   training's auc: 0.81296 training's binary_logloss: 0.0826931    valid_1's auc: 0.822559 valid_1's binary_logloss: 0.0599625
[132]   training's auc: 0.812977    training's binary_logloss: 0.0826885    valid_1's auc: 0.822703 valid_1's binary_logloss: 0.0599417
[133]   training's auc: 0.813021    training's binary_logloss: 0.0826841    valid_1's auc: 0.822615 valid_1's binary_logloss: 0.0599482
[134]   training's auc: 0.813034    training's binary_logloss: 0.0826782    valid_1's auc: 0.822601 valid_1's binary_logloss: 0.0599442
[135]   training's auc: 0.813064    training's binary_logloss: 0.082674 valid_1's auc: 0.822597 valid_1's binary_logloss: 0.0599422
[136]   training's auc: 0.813152    training's binary_logloss: 0.0826665    valid_1's auc: 0.82274  valid_1's binary_logloss: 0.0599408
[137]   training's auc: 0.813161    training's binary_logloss: 0.0826624    valid_1's auc: 0.822848 valid_1's binary_logloss: 0.0599344
[138]   training's auc: 0.813177    training's binary_logloss: 0.0826574    valid_1's auc: 0.822831 valid_1's binary_logloss: 0.059931
[139]   training's auc: 0.813186    training's binary_logloss: 0.0826547    valid_1's auc: 0.822759 valid_1's binary_logloss: 0.0599297
[140]   training's auc: 0.813299    training's binary_logloss: 0.0826421    valid_1's auc: 0.822658 valid_1's binary_logloss: 0.0599483
[141]   training's auc: 0.813336    training's binary_logloss: 0.0826374    valid_1's auc: 0.822697 valid_1's binary_logloss: 0.0599498
[142]   training's auc: 0.813438    training's binary_logloss: 0.0826291    valid_1's auc: 0.822632 valid_1's binary_logloss: 0.059949
[143]   training's auc: 0.813443    training's binary_logloss: 0.0826246    valid_1's auc: 0.822654 valid_1's binary_logloss: 0.0599424
[144]   training's auc: 0.813508    training's binary_logloss: 0.0826121    valid_1's auc: 0.822624 valid_1's binary_logloss: 0.0599503
[145]   training's auc: 0.813552    training's binary_logloss: 0.0826029    valid_1's auc: 0.82264  valid_1's binary_logloss: 0.059951
[146]   training's auc: 0.813679    training's binary_logloss: 0.082593 valid_1's auc: 0.822462 valid_1's binary_logloss: 0.0599591
[147]   training's auc: 0.813806    training's binary_logloss: 0.082583 valid_1's auc: 0.822468 valid_1's binary_logloss: 0.0599645
[148]   training's auc: 0.8139  training's binary_logloss: 0.0825734    valid_1's auc: 0.822298 valid_1's binary_logloss: 0.0599615
[149]   training's auc: 0.813913    training's binary_logloss: 0.0825699    valid_1's auc: 0.822275 valid_1's binary_logloss: 0.0599551
[150]   training's auc: 0.81395 training's binary_logloss: 0.0825639    valid_1's auc: 0.822281 valid_1's binary_logloss: 0.0599576
[151]   training's auc: 0.81404 training's binary_logloss: 0.0825578    valid_1's auc: 0.82229  valid_1's binary_logloss: 0.0599587
[152]   training's auc: 0.814013    training's binary_logloss: 0.0825556    valid_1's auc: 0.822147 valid_1's binary_logloss: 0.0599682
[153]   training's auc: 0.814033    training's binary_logloss: 0.0825419    valid_1's auc: 0.822199 valid_1's binary_logloss: 0.0599495
Early stopping, best iteration is:
[53]    training's auc: 0.806651    training's binary_logloss: 0.0838034    valid_1's auc: 0.825846 valid_1's binary_logloss: 0.060648
8
[1] training's auc: 0.655534    training's binary_logloss: 0.0921053    valid_1's auc: 0.64957  valid_1's binary_logloss: 0.0890446
Training until validation scores don't improve for 100 rounds.
[2] training's auc: 0.784031    training's binary_logloss: 0.0906779    valid_1's auc: 0.754556 valid_1's binary_logloss: 0.0879805
[3] training's auc: 0.792225    training's binary_logloss: 0.0900435    valid_1's auc: 0.756691 valid_1's binary_logloss: 0.0874868
[4] training's auc: 0.792269    training's binary_logloss: 0.0894428    valid_1's auc: 0.762152 valid_1's binary_logloss: 0.0869681
[5] training's auc: 0.800292    training's binary_logloss: 0.0884055    valid_1's auc: 0.76791  valid_1's binary_logloss: 0.086218
[6] training's auc: 0.806541    training's binary_logloss: 0.0874556    valid_1's auc: 0.773433 valid_1's binary_logloss: 0.0855461
[7] training's auc: 0.80569 training's binary_logloss: 0.0870122    valid_1's auc: 0.77362  valid_1's binary_logloss: 0.0851024
[8] training's auc: 0.805672    training's binary_logloss: 0.0862701    valid_1's auc: 0.775316 valid_1's binary_logloss: 0.0845559
[9] training's auc: 0.808261    training's binary_logloss: 0.0860663    valid_1's auc: 0.779628 valid_1's binary_logloss: 0.0843609
[10]    training's auc: 0.807458    training's binary_logloss: 0.0857178    valid_1's auc: 0.781358 valid_1's binary_logloss: 0.0840543
[11]    training's auc: 0.808021    training's binary_logloss: 0.0850958    valid_1's auc: 0.781175 valid_1's binary_logloss: 0.0836284
[12]    training's auc: 0.810203    training's binary_logloss: 0.0845284    valid_1's auc: 0.782227 valid_1's binary_logloss: 0.0832315
[13]    training's auc: 0.810169    training's binary_logloss: 0.0842948    valid_1's auc: 0.780961 valid_1's binary_logloss: 0.0830693
[14]    training's auc: 0.809282    training's binary_logloss: 0.0840578    valid_1's auc: 0.781346 valid_1's binary_logloss: 0.0828613
[15]    training's auc: 0.810176    training's binary_logloss: 0.0836058    valid_1's auc: 0.781528 valid_1's binary_logloss: 0.0825443
[16]    training's auc: 0.811237    training's binary_logloss: 0.0832011    valid_1's auc: 0.782219 valid_1's binary_logloss: 0.0822844
[17]    training's auc: 0.810978    training's binary_logloss: 0.0829828    valid_1's auc: 0.782345 valid_1's binary_logloss: 0.0820686
[18]    training's auc: 0.811253    training's binary_logloss: 0.0826373    valid_1's auc: 0.782161 valid_1's binary_logloss: 0.0818394
[19]    training's auc: 0.812154    training's binary_logloss: 0.0823514    valid_1's auc: 0.782977 valid_1's binary_logloss: 0.081622
[20]    training's auc: 0.811921    training's binary_logloss: 0.0821776    valid_1's auc: 0.782479 valid_1's binary_logloss: 0.0814714
[21]    training's auc: 0.812247    training's binary_logloss: 0.081933 valid_1's auc: 0.782736 valid_1's binary_logloss: 0.0813235
[22]    training's auc: 0.812982    training's binary_logloss: 0.0816813    valid_1's auc: 0.782961 valid_1's binary_logloss: 0.0811561
[23]    training's auc: 0.813761    training's binary_logloss: 0.0815328    valid_1's auc: 0.782592 valid_1's binary_logloss: 0.0810227
[24]    training's auc: 0.813515    training's binary_logloss: 0.081318 valid_1's auc: 0.78257  valid_1's binary_logloss: 0.0808831
[25]    training's auc: 0.813456    training's binary_logloss: 0.0811932    valid_1's auc: 0.782132 valid_1's binary_logloss: 0.080776
[26]    training's auc: 0.814143    training's binary_logloss: 0.0810232    valid_1's auc: 0.783143 valid_1's binary_logloss: 0.080685
[27]    training's auc: 0.814392    training's binary_logloss: 0.0808586    valid_1's auc: 0.782958 valid_1's binary_logloss: 0.0805982
[28]    training's auc: 0.815114    training's binary_logloss: 0.0807799    valid_1's auc: 0.783904 valid_1's binary_logloss: 0.0805204
[29]    training's auc: 0.815326    training's binary_logloss: 0.0806284    valid_1's auc: 0.783214 valid_1's binary_logloss: 0.0804473
[30]    training's auc: 0.815445    training's binary_logloss: 0.0805127    valid_1's auc: 0.783504 valid_1's binary_logloss: 0.080337
[31]    training's auc: 0.815664    training's binary_logloss: 0.0803734    valid_1's auc: 0.783092 valid_1's binary_logloss: 0.080245
[32]    training's auc: 0.815461    training's binary_logloss: 0.0803042    valid_1's auc: 0.783076 valid_1's binary_logloss: 0.0801969
[33]    training's auc: 0.815944    training's binary_logloss: 0.0801849    valid_1's auc: 0.78335  valid_1's binary_logloss: 0.0801189
[34]    training's auc: 0.816011    training's binary_logloss: 0.0801248    valid_1's auc: 0.783221 valid_1's binary_logloss: 0.0800747
[35]    training's auc: 0.816264    training's binary_logloss: 0.0800343    valid_1's auc: 0.78347  valid_1's binary_logloss: 0.0800399
[36]    training's auc: 0.816413    training's binary_logloss: 0.0799276    valid_1's auc: 0.783579 valid_1's binary_logloss: 0.0799957
[37]    training's auc: 0.816717    training's binary_logloss: 0.0798412    valid_1's auc: 0.783838 valid_1's binary_logloss: 0.0799309
[38]    training's auc: 0.816858    training's binary_logloss: 0.0797467    valid_1's auc: 0.783795 valid_1's binary_logloss: 0.0798736
[39]    training's auc: 0.817364    training's binary_logloss: 0.0796987    valid_1's auc: 0.783824 valid_1's binary_logloss: 0.0798193
[40]    training's auc: 0.817382    training's binary_logloss: 0.0796192    valid_1's auc: 0.783799 valid_1's binary_logloss: 0.0797769
[41]    training's auc: 0.817685    training's binary_logloss: 0.0795346    valid_1's auc: 0.784231 valid_1's binary_logloss: 0.0797304
[42]    training's auc: 0.817907    training's binary_logloss: 0.0794741    valid_1's auc: 0.78407  valid_1's binary_logloss: 0.0797124
[43]    training's auc: 0.818201    training's binary_logloss: 0.0794205    valid_1's auc: 0.784022 valid_1's binary_logloss: 0.0796767
[44]    training's auc: 0.818218    training's binary_logloss: 0.0793631    valid_1's auc: 0.784115 valid_1's binary_logloss: 0.079661
[45]    training's auc: 0.818524    training's binary_logloss: 0.0792982    valid_1's auc: 0.783571 valid_1's binary_logloss: 0.0796149
[46]    training's auc: 0.818555    training's binary_logloss: 0.0792513    valid_1's auc: 0.783648 valid_1's binary_logloss: 0.0795808
[47]    training's auc: 0.818615    training's binary_logloss: 0.0791979    valid_1's auc: 0.783478 valid_1's binary_logloss: 0.0795826
[48]    training's auc: 0.818592    training's binary_logloss: 0.0791522    valid_1's auc: 0.783445 valid_1's binary_logloss: 0.0795619
[49]    training's auc: 0.818876    training's binary_logloss: 0.0791164    valid_1's auc: 0.78382  valid_1's binary_logloss: 0.0795243
[50]    training's auc: 0.819024    training's binary_logloss: 0.0790847    valid_1's auc: 0.783502 valid_1's binary_logloss: 0.0795269
[51]    training's auc: 0.818982    training's binary_logloss: 0.0790376    valid_1's auc: 0.783484 valid_1's binary_logloss: 0.0794939
[52]    training's auc: 0.818955    training's binary_logloss: 0.0790008    valid_1's auc: 0.783276 valid_1's binary_logloss: 0.0794859
[53]    training's auc: 0.819034    training's binary_logloss: 0.078982 valid_1's auc: 0.783205 valid_1's binary_logloss: 0.0794826
[54]    training's auc: 0.819007    training's binary_logloss: 0.0789487    valid_1's auc: 0.783178 valid_1's binary_logloss: 0.0794655
[55]    training's auc: 0.819106    training's binary_logloss: 0.0789174    valid_1's auc: 0.783313 valid_1's binary_logloss: 0.0794325
[56]    training's auc: 0.818988    training's binary_logloss: 0.0788839    valid_1's auc: 0.783629 valid_1's binary_logloss: 0.0794011
[57]    training's auc: 0.819188    training's binary_logloss: 0.0788547    valid_1's auc: 0.783817 valid_1's binary_logloss: 0.0793864
[58]    training's auc: 0.819402    training's binary_logloss: 0.0788221    valid_1's auc: 0.783963 valid_1's binary_logloss: 0.0793541
[59]    training's auc: 0.81949 training's binary_logloss: 0.0787999    valid_1's auc: 0.784072 valid_1's binary_logloss: 0.079331
[60]    training's auc: 0.819589    training's binary_logloss: 0.0787798    valid_1's auc: 0.784306 valid_1's binary_logloss: 0.0793165
[61]    training's auc: 0.819728    training's binary_logloss: 0.0787515    valid_1's auc: 0.784216 valid_1's binary_logloss: 0.0792937
[62]    training's auc: 0.819662    training's binary_logloss: 0.0787233    valid_1's auc: 0.784164 valid_1's binary_logloss: 0.0792801
[63]    training's auc: 0.819813    training's binary_logloss: 0.0787029    valid_1's auc: 0.784571 valid_1's binary_logloss: 0.0792499
[64]    training's auc: 0.819947    training's binary_logloss: 0.0786747    valid_1's auc: 0.78446  valid_1's binary_logloss: 0.0792533
[65]    training's auc: 0.820022    training's binary_logloss: 0.0786538    valid_1's auc: 0.784472 valid_1's binary_logloss: 0.0792316
[66]    training's auc: 0.820127    training's binary_logloss: 0.0786348    valid_1's auc: 0.784664 valid_1's binary_logloss: 0.0792305
[67]    training's auc: 0.820297    training's binary_logloss: 0.0786163    valid_1's auc: 0.784947 valid_1's binary_logloss: 0.0791983
[68]    training's auc: 0.820432    training's binary_logloss: 0.0785906    valid_1's auc: 0.785215 valid_1's binary_logloss: 0.0791634
[69]    training's auc: 0.820567    training's binary_logloss: 0.0785762    valid_1's auc: 0.785291 valid_1's binary_logloss: 0.0791573
[70]    training's auc: 0.820656    training's binary_logloss: 0.078559 valid_1's auc: 0.785022 valid_1's binary_logloss: 0.0791504
[71]    training's auc: 0.820772    training's binary_logloss: 0.0785481    valid_1's auc: 0.785291 valid_1's binary_logloss: 0.079121
[72]    training's auc: 0.820776    training's binary_logloss: 0.0785264    valid_1's auc: 0.785388 valid_1's binary_logloss: 0.0791141
[73]    training's auc: 0.820885    training's binary_logloss: 0.0785048    valid_1's auc: 0.785601 valid_1's binary_logloss: 0.0790928
[74]    training's auc: 0.820993    training's binary_logloss: 0.0784918    valid_1's auc: 0.785674 valid_1's binary_logloss: 0.0790998
[75]    training's auc: 0.821025    training's binary_logloss: 0.0784787    valid_1's auc: 0.785609 valid_1's binary_logloss: 0.0791091
[76]    training's auc: 0.821143    training's binary_logloss: 0.0784582    valid_1's auc: 0.785533 valid_1's binary_logloss: 0.0790983
[77]    training's auc: 0.821153    training's binary_logloss: 0.0784449    valid_1's auc: 0.785453 valid_1's binary_logloss: 0.0790925
[78]    training's auc: 0.821267    training's binary_logloss: 0.0784375    valid_1's auc: 0.785365 valid_1's binary_logloss: 0.0790976
[79]    training's auc: 0.821337    training's binary_logloss: 0.0784289    valid_1's auc: 0.785413 valid_1's binary_logloss: 0.0791004
[80]    training's auc: 0.821432    training's binary_logloss: 0.0784187    valid_1's auc: 0.785352 valid_1's binary_logloss: 0.0791017
[81]    training's auc: 0.821432    training's binary_logloss: 0.0784016    valid_1's auc: 0.78561  valid_1's binary_logloss: 0.0790911
[82]    training's auc: 0.821599    training's binary_logloss: 0.0783875    valid_1's auc: 0.785593 valid_1's binary_logloss: 0.0790856
[83]    training's auc: 0.821741    training's binary_logloss: 0.0783701    valid_1's auc: 0.785815 valid_1's binary_logloss: 0.0790796
[84]    training's auc: 0.821787    training's binary_logloss: 0.0783598    valid_1's auc: 0.785932 valid_1's binary_logloss: 0.0790788
[85]    training's auc: 0.821866    training's binary_logloss: 0.0783494    valid_1's auc: 0.786283 valid_1's binary_logloss: 0.0790641
[86]    training's auc: 0.821849    training's binary_logloss: 0.0783339    valid_1's auc: 0.786232 valid_1's binary_logloss: 0.079058
[87]    training's auc: 0.821839    training's binary_logloss: 0.0783175    valid_1's auc: 0.785939 valid_1's binary_logloss: 0.0790535
[88]    training's auc: 0.821931    training's binary_logloss: 0.0783039    valid_1's auc: 0.78613  valid_1's binary_logloss: 0.0790461
[89]    training's auc: 0.821914    training's binary_logloss: 0.0782978    valid_1's auc: 0.786344 valid_1's binary_logloss: 0.0790378
[90]    training's auc: 0.821963    training's binary_logloss: 0.0782853    valid_1's auc: 0.786261 valid_1's binary_logloss: 0.0790495
[91]    training's auc: 0.822054    training's binary_logloss: 0.0782734    valid_1's auc: 0.786616 valid_1's binary_logloss: 0.0790317
[92]    training's auc: 0.822196    training's binary_logloss: 0.078264 valid_1's auc: 0.78647  valid_1's binary_logloss: 0.0790418
[93]    training's auc: 0.82227 training's binary_logloss: 0.0782511    valid_1's auc: 0.786457 valid_1's binary_logloss: 0.0790237
[94]    training's auc: 0.822332    training's binary_logloss: 0.0782432    valid_1's auc: 0.78645  valid_1's binary_logloss: 0.0790311
[95]    training's auc: 0.822368    training's binary_logloss: 0.0782334    valid_1's auc: 0.786355 valid_1's binary_logloss: 0.0790374
[96]    training's auc: 0.822411    training's binary_logloss: 0.0782272    valid_1's auc: 0.786398 valid_1's binary_logloss: 0.0790194
[97]    training's auc: 0.822502    training's binary_logloss: 0.0782184    valid_1's auc: 0.78646  valid_1's binary_logloss: 0.079007
[98]    training's auc: 0.822563    training's binary_logloss: 0.0782044    valid_1's auc: 0.786401 valid_1's binary_logloss: 0.0789917
[99]    training's auc: 0.822627    training's binary_logloss: 0.0781944    valid_1's auc: 0.786408 valid_1's binary_logloss: 0.0789963
[100]   training's auc: 0.822656    training's binary_logloss: 0.078188 valid_1's auc: 0.786307 valid_1's binary_logloss: 0.0790026
[101]   training's auc: 0.822742    training's binary_logloss: 0.0781771    valid_1's auc: 0.786278 valid_1's binary_logloss: 0.0790049
[102]   training's auc: 0.82282 training's binary_logloss: 0.0781672    valid_1's auc: 0.786192 valid_1's binary_logloss: 0.0790083
[103]   training's auc: 0.822795    training's binary_logloss: 0.0781576    valid_1's auc: 0.786247 valid_1's binary_logloss: 0.0790055
[104]   training's auc: 0.822816    training's binary_logloss: 0.0781485    valid_1's auc: 0.786197 valid_1's binary_logloss: 0.07901
[105]   training's auc: 0.822842    training's binary_logloss: 0.0781402    valid_1's auc: 0.786148 valid_1's binary_logloss: 0.0790222
[106]   training's auc: 0.822924    training's binary_logloss: 0.0781312    valid_1's auc: 0.786128 valid_1's binary_logloss: 0.0790196
[107]   training's auc: 0.822985    training's binary_logloss: 0.0781237    valid_1's auc: 0.786337 valid_1's binary_logloss: 0.07901
[108]   training's auc: 0.823039    training's binary_logloss: 0.0781189    valid_1's auc: 0.786351 valid_1's binary_logloss: 0.0790103
[109]   training's auc: 0.823078    training's binary_logloss: 0.0781112    valid_1's auc: 0.786267 valid_1's binary_logloss: 0.0790051
[110]   training's auc: 0.82319 training's binary_logloss: 0.0781041    valid_1's auc: 0.786363 valid_1's binary_logloss: 0.0790121
[111]   training's auc: 0.823236    training's binary_logloss: 0.0780987    valid_1's auc: 0.786377 valid_1's binary_logloss: 0.0790151
[112]   training's auc: 0.823316    training's binary_logloss: 0.0780912    valid_1's auc: 0.786322 valid_1's binary_logloss: 0.0790217
[113]   training's auc: 0.823261    training's binary_logloss: 0.0780737    valid_1's auc: 0.786354 valid_1's binary_logloss: 0.0790061
[114]   training's auc: 0.823326    training's binary_logloss: 0.0780691    valid_1's auc: 0.786429 valid_1's binary_logloss: 0.0790017
[115]   training's auc: 0.823345    training's binary_logloss: 0.0780657    valid_1's auc: 0.78652  valid_1's binary_logloss: 0.078992
[116]   training's auc: 0.823398    training's binary_logloss: 0.0780629    valid_1's auc: 0.786453 valid_1's binary_logloss: 0.078996
[117]   training's auc: 0.82352 training's binary_logloss: 0.078055 valid_1's auc: 0.786324 valid_1's binary_logloss: 0.0789912
[118]   training's auc: 0.823639    training's binary_logloss: 0.0780459    valid_1's auc: 0.786357 valid_1's binary_logloss: 0.0789927
[119]   training's auc: 0.823765    training's binary_logloss: 0.0780379    valid_1's auc: 0.786363 valid_1's binary_logloss: 0.078994
[120]   training's auc: 0.823795    training's binary_logloss: 0.0780322    valid_1's auc: 0.786589 valid_1's binary_logloss: 0.0789764
[121]   training's auc: 0.82394 training's binary_logloss: 0.0780226    valid_1's auc: 0.786529 valid_1's binary_logloss: 0.0789795
[122]   training's auc: 0.823982    training's binary_logloss: 0.0780177    valid_1's auc: 0.786606 valid_1's binary_logloss: 0.0789843
[123]   training's auc: 0.82402 training's binary_logloss: 0.0780125    valid_1's auc: 0.786626 valid_1's binary_logloss: 0.0789717
[124]   training's auc: 0.824026    training's binary_logloss: 0.0780057    valid_1's auc: 0.786676 valid_1's binary_logloss: 0.0789752
[125]   training's auc: 0.824058    training's binary_logloss: 0.0780015    valid_1's auc: 0.786836 valid_1's binary_logloss: 0.0789708
[126]   training's auc: 0.824083    training's binary_logloss: 0.0779982    valid_1's auc: 0.786917 valid_1's binary_logloss: 0.0789604
[127]   training's auc: 0.824091    training's binary_logloss: 0.0779944    valid_1's auc: 0.787011 valid_1's binary_logloss: 0.0789621
[128]   training's auc: 0.824093    training's binary_logloss: 0.0779903    valid_1's auc: 0.786994 valid_1's binary_logloss: 0.0789605
[129]   training's auc: 0.824115    training's binary_logloss: 0.0779862    valid_1's auc: 0.786917 valid_1's binary_logloss: 0.0789549
[130]   training's auc: 0.824134    training's binary_logloss: 0.07798  valid_1's auc: 0.786869 valid_1's binary_logloss: 0.0789555
[131]   training's auc: 0.824169    training's binary_logloss: 0.0779732    valid_1's auc: 0.786889 valid_1's binary_logloss: 0.0789679
[132]   training's auc: 0.824176    training's binary_logloss: 0.0779674    valid_1's auc: 0.786952 valid_1's binary_logloss: 0.0789678
[133]   training's auc: 0.824187    training's binary_logloss: 0.077964 valid_1's auc: 0.787101 valid_1's binary_logloss: 0.0789585
[134]   training's auc: 0.824254    training's binary_logloss: 0.0779584    valid_1's auc: 0.787027 valid_1's binary_logloss: 0.0789541
[135]   training's auc: 0.824341    training's binary_logloss: 0.077952 valid_1's auc: 0.786869 valid_1's binary_logloss: 0.0789723
[136]   training's auc: 0.82438 training's binary_logloss: 0.0779474    valid_1's auc: 0.78692  valid_1's binary_logloss: 0.0789597
[137]   training's auc: 0.824406    training's binary_logloss: 0.0779433    valid_1's auc: 0.786874 valid_1's binary_logloss: 0.0789575
[138]   training's auc: 0.824454    training's binary_logloss: 0.0779352    valid_1's auc: 0.786895 valid_1's binary_logloss: 0.0789616
[139]   training's auc: 0.824473    training's binary_logloss: 0.0779328    valid_1's auc: 0.786893 valid_1's binary_logloss: 0.0789637
[140]   training's auc: 0.82449 training's binary_logloss: 0.0779292    valid_1's auc: 0.786926 valid_1's binary_logloss: 0.0789555
[141]   training's auc: 0.824514    training's binary_logloss: 0.0779257    valid_1's auc: 0.786878 valid_1's binary_logloss: 0.0789611
[142]   training's auc: 0.824559    training's binary_logloss: 0.0779182    valid_1's auc: 0.786888 valid_1's binary_logloss: 0.0789665
[143]   training's auc: 0.824643    training's binary_logloss: 0.0779107    valid_1's auc: 0.786796 valid_1's binary_logloss: 0.0789839
[144]   training's auc: 0.824719    training's binary_logloss: 0.0779031    valid_1's auc: 0.786829 valid_1's binary_logloss: 0.078982
[145]   training's auc: 0.824727    training's binary_logloss: 0.0778948    valid_1's auc: 0.786816 valid_1's binary_logloss: 0.078985
[146]   training's auc: 0.824755    training's binary_logloss: 0.0778917    valid_1's auc: 0.786763 valid_1's binary_logloss: 0.0789806
[147]   training's auc: 0.824772    training's binary_logloss: 0.0778878    valid_1's auc: 0.786758 valid_1's binary_logloss: 0.078983
[148]   training's auc: 0.824852    training's binary_logloss: 0.0778802    valid_1's auc: 0.786854 valid_1's binary_logloss: 0.0789847
[149]   training's auc: 0.824865    training's binary_logloss: 0.077876 valid_1's auc: 0.786902 valid_1's binary_logloss: 0.0789814
[150]   training's auc: 0.824915    training's binary_logloss: 0.0778703    valid_1's auc: 0.786859 valid_1's binary_logloss: 0.0789805
[151]   training's auc: 0.824947    training's binary_logloss: 0.0778578    valid_1's auc: 0.786807 valid_1's binary_logloss: 0.0789573
[152]   training's auc: 0.824974    training's binary_logloss: 0.0778552    valid_1's auc: 0.786779 valid_1's binary_logloss: 0.0789618
[153]   training's auc: 0.824993    training's binary_logloss: 0.0778495    valid_1's auc: 0.786761 valid_1's binary_logloss: 0.0789768
[154]   training's auc: 0.825031    training's binary_logloss: 0.0778448    valid_1's auc: 0.786767 valid_1's binary_logloss: 0.0789732
[155]   training's auc: 0.825142    training's binary_logloss: 0.0778363    valid_1's auc: 0.786753 valid_1's binary_logloss: 0.0789691
[156]   training's auc: 0.825185    training's binary_logloss: 0.0778336    valid_1's auc: 0.786824 valid_1's binary_logloss: 0.0789662
[157]   training's auc: 0.825244    training's binary_logloss: 0.077829 valid_1's auc: 0.786973 valid_1's binary_logloss: 0.078953
[158]   training's auc: 0.825257    training's binary_logloss: 0.0778269    valid_1's auc: 0.786975 valid_1's binary_logloss: 0.0789466
[159]   training's auc: 0.825286    training's binary_logloss: 0.077823 valid_1's auc: 0.78702  valid_1's binary_logloss: 0.0789322
[160]   training's auc: 0.825357    training's binary_logloss: 0.0778159    valid_1's auc: 0.78703  valid_1's binary_logloss: 0.0789328
[161]   training's auc: 0.825411    training's binary_logloss: 0.0778119    valid_1's auc: 0.78692  valid_1's binary_logloss: 0.0789383
[162]   training's auc: 0.825434    training's binary_logloss: 0.0778067    valid_1's auc: 0.786873 valid_1's binary_logloss: 0.0789497
[163]   training's auc: 0.825453    training's binary_logloss: 0.0778046    valid_1's auc: 0.786864 valid_1's binary_logloss: 0.0789537
[164]   training's auc: 0.825503    training's binary_logloss: 0.077798 valid_1's auc: 0.786776 valid_1's binary_logloss: 0.0789576
[165]   training's auc: 0.825541    training's binary_logloss: 0.0777844    valid_1's auc: 0.786773 valid_1's binary_logloss: 0.0789516
[166]   training's auc: 0.825539    training's binary_logloss: 0.0777775    valid_1's auc: 0.786653 valid_1's binary_logloss: 0.078943
[167]   training's auc: 0.825606    training's binary_logloss: 0.0777724    valid_1's auc: 0.786586 valid_1's binary_logloss: 0.0789509
[168]   training's auc: 0.825606    training's binary_logloss: 0.077771 valid_1's auc: 0.786555 valid_1's binary_logloss: 0.0789529
[169]   training's auc: 0.825623    training's binary_logloss: 0.0777661    valid_1's auc: 0.786428 valid_1's binary_logloss: 0.0789606
[170]   training's auc: 0.825644    training's binary_logloss: 0.0777637    valid_1's auc: 0.786396 valid_1's binary_logloss: 0.0789655
[171]   training's auc: 0.825668    training's binary_logloss: 0.0777602    valid_1's auc: 0.786352 valid_1's binary_logloss: 0.0789625
[172]   training's auc: 0.825707    training's binary_logloss: 0.0777449    valid_1's auc: 0.786399 valid_1's binary_logloss: 0.07897
[173]   training's auc: 0.825756    training's binary_logloss: 0.077737 valid_1's auc: 0.78632  valid_1's binary_logloss: 0.0789739
[174]   training's auc: 0.825777    training's binary_logloss: 0.077735 valid_1's auc: 0.786218 valid_1's binary_logloss: 0.0789728
[175]   training's auc: 0.825797    training's binary_logloss: 0.0777302    valid_1's auc: 0.786371 valid_1's binary_logloss: 0.0789586
[176]   training's auc: 0.82586 training's binary_logloss: 0.0777229    valid_1's auc: 0.786413 valid_1's binary_logloss: 0.0789549
[177]   training's auc: 0.82586 training's binary_logloss: 0.0777215    valid_1's auc: 0.786354 valid_1's binary_logloss: 0.0789554
[178]   training's auc: 0.825978    training's binary_logloss: 0.0777139    valid_1's auc: 0.786282 valid_1's binary_logloss: 0.0789642
[179]   training's auc: 0.826004    training's binary_logloss: 0.077712 valid_1's auc: 0.78624  valid_1's binary_logloss: 0.0789668
[180]   training's auc: 0.826015    training's binary_logloss: 0.0777086    valid_1's auc: 0.786241 valid_1's binary_logloss: 0.0789582
[181]   training's auc: 0.826025    training's binary_logloss: 0.0777058    valid_1's auc: 0.786223 valid_1's binary_logloss: 0.0789672
[182]   training's auc: 0.826054    training's binary_logloss: 0.0777037    valid_1's auc: 0.786208 valid_1's binary_logloss: 0.0789682
[183]   training's auc: 0.826064    training's binary_logloss: 0.0777011    valid_1's auc: 0.786239 valid_1's binary_logloss: 0.0789613
[184]   training's auc: 0.826124    training's binary_logloss: 0.0776982    valid_1's auc: 0.786293 valid_1's binary_logloss: 0.0789611
[185]   training's auc: 0.826139    training's binary_logloss: 0.0776903    valid_1's auc: 0.786263 valid_1's binary_logloss: 0.0789511
[186]   training's auc: 0.826179    training's binary_logloss: 0.0776848    valid_1's auc: 0.786334 valid_1's binary_logloss: 0.078947
[187]   training's auc: 0.826215    training's binary_logloss: 0.0776789    valid_1's auc: 0.786401 valid_1's binary_logloss: 0.0789423
[188]   training's auc: 0.826235    training's binary_logloss: 0.077676 valid_1's auc: 0.786488 valid_1's binary_logloss: 0.0789343
[189]   training's auc: 0.826282    training's binary_logloss: 0.0776715    valid_1's auc: 0.786471 valid_1's binary_logloss: 0.0789407
[190]   training's auc: 0.826278    training's binary_logloss: 0.0776701    valid_1's auc: 0.786426 valid_1's binary_logloss: 0.0789375
[191]   training's auc: 0.826325    training's binary_logloss: 0.0776637    valid_1's auc: 0.786376 valid_1's binary_logloss: 0.0789415
[192]   training's auc: 0.826328    training's binary_logloss: 0.0776618    valid_1's auc: 0.786346 valid_1's binary_logloss: 0.0789386
[193]   training's auc: 0.826437    training's binary_logloss: 0.0776509    valid_1's auc: 0.786426 valid_1's binary_logloss: 0.0789442
[194]   training's auc: 0.826493    training's binary_logloss: 0.0776317    valid_1's auc: 0.786441 valid_1's binary_logloss: 0.0789423
[195]   training's auc: 0.826518    training's binary_logloss: 0.0776291    valid_1's auc: 0.786308 valid_1's binary_logloss: 0.0789367
[196]   training's auc: 0.826582    training's binary_logloss: 0.0776236    valid_1's auc: 0.786229 valid_1's binary_logloss: 0.0789333
[197]   training's auc: 0.826606    training's binary_logloss: 0.0776201    valid_1's auc: 0.786179 valid_1's binary_logloss: 0.0789414
[198]   training's auc: 0.826633    training's binary_logloss: 0.0776174    valid_1's auc: 0.786172 valid_1's binary_logloss: 0.0789445
[199]   training's auc: 0.826654    training's binary_logloss: 0.0776125    valid_1's auc: 0.786157 valid_1's binary_logloss: 0.0789483
[200]   training's auc: 0.826726    training's binary_logloss: 0.0776032    valid_1's auc: 0.786308 valid_1's binary_logloss: 0.0789347
[201]   training's auc: 0.826736    training's binary_logloss: 0.0776011    valid_1's auc: 0.786306 valid_1's binary_logloss: 0.07893
[202]   training's auc: 0.826725    training's binary_logloss: 0.0775985    valid_1's auc: 0.78634  valid_1's binary_logloss: 0.0789325
[203]   training's auc: 0.826761    training's binary_logloss: 0.0775836    valid_1's auc: 0.786332 valid_1's binary_logloss: 0.0789319
[204]   training's auc: 0.826767    training's binary_logloss: 0.0775756    valid_1's auc: 0.786323 valid_1's binary_logloss: 0.0789358
[205]   training's auc: 0.826826    training's binary_logloss: 0.0775704    valid_1's auc: 0.78636  valid_1's binary_logloss: 0.0789362
[206]   training's auc: 0.826843    training's binary_logloss: 0.0775686    valid_1's auc: 0.786327 valid_1's binary_logloss: 0.0789443
[207]   training's auc: 0.826886    training's binary_logloss: 0.0775625    valid_1's auc: 0.786343 valid_1's binary_logloss: 0.0789469
[208]   training's auc: 0.826877    training's binary_logloss: 0.0775602    valid_1's auc: 0.786374 valid_1's binary_logloss: 0.0789392
[209]   training's auc: 0.826923    training's binary_logloss: 0.0775509    valid_1's auc: 0.786372 valid_1's binary_logloss: 0.0789281
[210]   training's auc: 0.826956    training's binary_logloss: 0.0775488    valid_1's auc: 0.786434 valid_1's binary_logloss: 0.0789274
[211]   training's auc: 0.827002    training's binary_logloss: 0.077542 valid_1's auc: 0.786457 valid_1's binary_logloss: 0.0789298
[212]   training's auc: 0.827065    training's binary_logloss: 0.0775354    valid_1's auc: 0.786537 valid_1's binary_logloss: 0.0789285
[213]   training's auc: 0.827099    training's binary_logloss: 0.077534 valid_1's auc: 0.78651  valid_1's binary_logloss: 0.0789412
[214]   training's auc: 0.827106    training's binary_logloss: 0.0775318    valid_1's auc: 0.786489 valid_1's binary_logloss: 0.078938
[215]   training's auc: 0.827147    training's binary_logloss: 0.0775288    valid_1's auc: 0.786432 valid_1's binary_logloss: 0.0789351
[216]   training's auc: 0.827218    training's binary_logloss: 0.0775226    valid_1's auc: 0.78638  valid_1's binary_logloss: 0.0789424
[217]   training's auc: 0.827248    training's binary_logloss: 0.0775197    valid_1's auc: 0.786353 valid_1's binary_logloss: 0.0789355
[218]   training's auc: 0.827266    training's binary_logloss: 0.0775178    valid_1's auc: 0.786303 valid_1's binary_logloss: 0.078938
[219]   training's auc: 0.827273    training's binary_logloss: 0.0775152    valid_1's auc: 0.786388 valid_1's binary_logloss: 0.0789348
[220]   training's auc: 0.827309    training's binary_logloss: 0.0775012    valid_1's auc: 0.786397 valid_1's binary_logloss: 0.0789287
[221]   training's auc: 0.827355    training's binary_logloss: 0.0774964    valid_1's auc: 0.786415 valid_1's binary_logloss: 0.0789237
[222]   training's auc: 0.827414    training's binary_logloss: 0.0774894    valid_1's auc: 0.786362 valid_1's binary_logloss: 0.0789336
[223]   training's auc: 0.827425    training's binary_logloss: 0.0774865    valid_1's auc: 0.786212 valid_1's binary_logloss: 0.0789445
[224]   training's auc: 0.827432    training's binary_logloss: 0.0774849    valid_1's auc: 0.786205 valid_1's binary_logloss: 0.0789539
[225]   training's auc: 0.827441    training's binary_logloss: 0.0774832    valid_1's auc: 0.786219 valid_1's binary_logloss: 0.0789549
[226]   training's auc: 0.827478    training's binary_logloss: 0.0774814    valid_1's auc: 0.786104 valid_1's binary_logloss: 0.0789654
[227]   training's auc: 0.827504    training's binary_logloss: 0.0774766    valid_1's auc: 0.786015 valid_1's binary_logloss: 0.078965
[228]   training's auc: 0.827541    training's binary_logloss: 0.0774724    valid_1's auc: 0.786071 valid_1's binary_logloss: 0.0789723
[229]   training's auc: 0.827635    training's binary_logloss: 0.0774622    valid_1's auc: 0.78603  valid_1's binary_logloss: 0.0789772
[230]   training's auc: 0.827692    training's binary_logloss: 0.0774549    valid_1's auc: 0.78611  valid_1's binary_logloss: 0.0789837
[231]   training's auc: 0.827744    training's binary_logloss: 0.0774514    valid_1's auc: 0.786075 valid_1's binary_logloss: 0.0789911
[232]   training's auc: 0.82773 training's binary_logloss: 0.0774475    valid_1's auc: 0.786088 valid_1's binary_logloss: 0.0789787
[233]   training's auc: 0.827737    training's binary_logloss: 0.0774452    valid_1's auc: 0.786147 valid_1's binary_logloss: 0.0789679
Early stopping, best iteration is:
[133]   training's auc: 0.824187    training's binary_logloss: 0.077964 valid_1's auc: 0.787101 valid_1's binary_logloss: 0.0789585
8
train_ks:  0.48924745315875107
test_ks:  0.47139484384477426
ft_lst:  ['finance_info', 'act_info', 'person_info', 'credit_info']
# Final, pruned feature list (selected by the repeated-split procedure above).
lst = ['person_info','finance_info','credit_info','act_info']

# Same time-based split as before: last month is the out-of-time sample.
train = data[data.obs_mth != '2018-11-30'].reset_index().copy()
evl = data[data.obs_mth == '2018-11-30'].reset_index().copy()

x = train[lst]
y = train['bad_ind']

evl_x = evl[lst]
evl_y = evl['bad_ind']

# Train the LightGBM model on the pruned feature set.
model, auc = LGB_test(x, y, evl_x, evl_y)


def _roc_and_ks(y_true, y_prob):
    # ROC coordinates plus KS = max |FPR - TPR|.
    fpr, tpr, _ = roc_curve(y_true, y_prob)
    return fpr, tpr, abs(fpr - tpr).max()


y_pred = model.predict_proba(x)[:, 1]
fpr_lgb_train, tpr_lgb_train, train_ks = _roc_and_ks(y, y_pred)
print('train_ks : ',train_ks)

y_pred = model.predict_proba(evl_x)[:, 1]
fpr_lgb, tpr_lgb, evl_ks = _roc_and_ks(evl_y, y_pred)
print('evl_ks : ',evl_ks)

from matplotlib import pyplot as plt
# Overlay the train and out-of-time ROC curves against the random diagonal.
for curve_fpr, curve_tpr, curve_label in (
        (fpr_lgb_train, tpr_lgb_train, 'train LR'),
        (fpr_lgb, tpr_lgb, 'evl LR'),
):
    plt.plot(curve_fpr, curve_tpr, label=curve_label)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlabel('False positive rate')
plt.ylabel('True positive rate')
plt.title('ROC Curve')
plt.legend(loc = 'best')
plt.show()
[1] training's auc: 0.712883    training's binary_logloss: 0.0902863    valid_1's auc: 0.678619 valid_1's binary_logloss: 0.0986249
Training until validation scores don't improve for 100 rounds.
[2] training's auc: 0.779422    training's binary_logloss: 0.0896631    valid_1's auc: 0.755386 valid_1's binary_logloss: 0.0979446
[3] training's auc: 0.796859    training's binary_logloss: 0.0884581    valid_1's auc: 0.766644 valid_1's binary_logloss: 0.0966552
[4] training's auc: 0.793759    training's binary_logloss: 0.0879312    valid_1's auc: 0.770979 valid_1's binary_logloss: 0.0960961
[5] training's auc: 0.799951    training's binary_logloss: 0.0870815    valid_1's auc: 0.771334 valid_1's binary_logloss: 0.0953262
[6] training's auc: 0.794522    training's binary_logloss: 0.0866877    valid_1's auc: 0.77294  valid_1's binary_logloss: 0.0949284
[7] training's auc: 0.798617    training's binary_logloss: 0.0859757    valid_1's auc: 0.772861 valid_1's binary_logloss: 0.0942325
[8] training's auc: 0.797511    training's binary_logloss: 0.0856484    valid_1's auc: 0.772569 valid_1's binary_logloss: 0.0939176
[9] training's auc: 0.79827 training's binary_logloss: 0.0850742    valid_1's auc: 0.772891 valid_1's binary_logloss: 0.0933913
[10]    training's auc: 0.799088    training's binary_logloss: 0.0847825    valid_1's auc: 0.77281  valid_1's binary_logloss: 0.0931575
[11]    training's auc: 0.79966 training's binary_logloss: 0.0842762    valid_1's auc: 0.773554 valid_1's binary_logloss: 0.0926702
[12]    training's auc: 0.799968    training's binary_logloss: 0.0840431    valid_1's auc: 0.774168 valid_1's binary_logloss: 0.092494
[13]    training's auc: 0.802731    training's binary_logloss: 0.0836309    valid_1's auc: 0.774418 valid_1's binary_logloss: 0.0921818
[14]    training's auc: 0.801762    training's binary_logloss: 0.0834125    valid_1's auc: 0.77673  valid_1's binary_logloss: 0.0919847
[15]    training's auc: 0.803912    training's binary_logloss: 0.0830492    valid_1's auc: 0.774707 valid_1's binary_logloss: 0.0917197
[16]    training's auc: 0.803406    training's binary_logloss: 0.0828779    valid_1's auc: 0.77519  valid_1's binary_logloss: 0.0915828
[17]    training's auc: 0.804264    training's binary_logloss: 0.0825632    valid_1's auc: 0.775024 valid_1's binary_logloss: 0.0913537
[18]    training's auc: 0.803802    training's binary_logloss: 0.0824116    valid_1's auc: 0.774978 valid_1's binary_logloss: 0.0912317
[19]    training's auc: 0.805187    training's binary_logloss: 0.082129 valid_1's auc: 0.774492 valid_1's binary_logloss: 0.091111
[20]    training's auc: 0.805259    training's binary_logloss: 0.0819953    valid_1's auc: 0.774937 valid_1's binary_logloss: 0.0910314
[21]    training's auc: 0.806136    training's binary_logloss: 0.0817592    valid_1's auc: 0.775148 valid_1's binary_logloss: 0.0909239
[22]    training's auc: 0.806158    training's binary_logloss: 0.0816511    valid_1's auc: 0.77573  valid_1's binary_logloss: 0.0908629
[23]    training's auc: 0.806679    training's binary_logloss: 0.08145  valid_1's auc: 0.775596 valid_1's binary_logloss: 0.0907771
[24]    training's auc: 0.806419    training's binary_logloss: 0.0813543    valid_1's auc: 0.776032 valid_1's binary_logloss: 0.0907198
[25]    training's auc: 0.806751    training's binary_logloss: 0.0811696    valid_1's auc: 0.775598 valid_1's binary_logloss: 0.0906547
[26]    training's auc: 0.8065  training's binary_logloss: 0.08107  valid_1's auc: 0.776031 valid_1's binary_logloss: 0.0905925
[27]    training's auc: 0.806891    training's binary_logloss: 0.0809137    valid_1's auc: 0.775924 valid_1's binary_logloss: 0.0905542
[28]    training's auc: 0.806718    training's binary_logloss: 0.0808341    valid_1's auc: 0.776476 valid_1's binary_logloss: 0.0905119
[29]    training's auc: 0.807206    training's binary_logloss: 0.0806913    valid_1's auc: 0.776453 valid_1's binary_logloss: 0.0904315
[30]    training's auc: 0.80706 training's binary_logloss: 0.080629 valid_1's auc: 0.776512 valid_1's binary_logloss: 0.0903921
[31]    training's auc: 0.807298    training's binary_logloss: 0.0805036    valid_1's auc: 0.776524 valid_1's binary_logloss: 0.0903695
[32]    training's auc: 0.807173    training's binary_logloss: 0.080443 valid_1's auc: 0.77697  valid_1's binary_logloss: 0.0903384
[33]    training's auc: 0.807918    training's binary_logloss: 0.0803269    valid_1's auc: 0.777455 valid_1's binary_logloss: 0.0902696
[34]    training's auc: 0.807757    training's binary_logloss: 0.0802784    valid_1's auc: 0.777844 valid_1's binary_logloss: 0.0902539
[35]    training's auc: 0.808269    training's binary_logloss: 0.080179 valid_1's auc: 0.777772 valid_1's binary_logloss: 0.0902162
[36]    training's auc: 0.808237    training's binary_logloss: 0.0801379    valid_1's auc: 0.777754 valid_1's binary_logloss: 0.0902106
[37]    training's auc: 0.80852 training's binary_logloss: 0.0800512    valid_1's auc: 0.777562 valid_1's binary_logloss: 0.0901705
[38]    training's auc: 0.808361    training's binary_logloss: 0.0800092    valid_1's auc: 0.777544 valid_1's binary_logloss: 0.090157
[39]    training's auc: 0.808478    training's binary_logloss: 0.0799318    valid_1's auc: 0.777332 valid_1's binary_logloss: 0.0901297
[40]    training's auc: 0.808321    training's binary_logloss: 0.079893 valid_1's auc: 0.777233 valid_1's binary_logloss: 0.0901127
[41]    training's auc: 0.808999    training's binary_logloss: 0.0798096    valid_1's auc: 0.777827 valid_1's binary_logloss: 0.0901137
[42]    training's auc: 0.808798    training's binary_logloss: 0.0797778    valid_1's auc: 0.777977 valid_1's binary_logloss: 0.0901147
[43]    training's auc: 0.809328    training's binary_logloss: 0.0797034    valid_1's auc: 0.778392 valid_1's binary_logloss: 0.0901236
[44]    training's auc: 0.80925 training's binary_logloss: 0.079672 valid_1's auc: 0.777956 valid_1's binary_logloss: 0.0901046
[45]    training's auc: 0.809647    training's binary_logloss: 0.0796166    valid_1's auc: 0.778165 valid_1's binary_logloss: 0.0900778
[46]    training's auc: 0.809439    training's binary_logloss: 0.0795896    valid_1's auc: 0.778471 valid_1's binary_logloss: 0.0900589
[47]    training's auc: 0.80967 training's binary_logloss: 0.0795382    valid_1's auc: 0.778407 valid_1's binary_logloss: 0.0900608
[48]    training's auc: 0.809632    training's binary_logloss: 0.0795118    valid_1's auc: 0.778635 valid_1's binary_logloss: 0.0900545
[49]    training's auc: 0.810054    training's binary_logloss: 0.0794537    valid_1's auc: 0.778953 valid_1's binary_logloss: 0.0900595
[50]    training's auc: 0.809988    training's binary_logloss: 0.0794355    valid_1's auc: 0.778552 valid_1's binary_logloss: 0.0900536
[51]    training's auc: 0.809984    training's binary_logloss: 0.0793886    valid_1's auc: 0.778777 valid_1's binary_logloss: 0.0900401
[52]    training's auc: 0.809912    training's binary_logloss: 0.0793728    valid_1's auc: 0.778465 valid_1's binary_logloss: 0.0900442
[53]    training's auc: 0.810352    training's binary_logloss: 0.0793299    valid_1's auc: 0.778948 valid_1's binary_logloss: 0.0900201
[54]    training's auc: 0.810297    training's binary_logloss: 0.0793164    valid_1's auc: 0.779015 valid_1's binary_logloss: 0.0900228
[55]    training's auc: 0.810648    training's binary_logloss: 0.0792686    valid_1's auc: 0.778837 valid_1's binary_logloss: 0.0900379
[56]    training's auc: 0.810507    training's binary_logloss: 0.0792537    valid_1's auc: 0.778843 valid_1's binary_logloss: 0.0900383
[57]    training's auc: 0.810743    training's binary_logloss: 0.0792137    valid_1's auc: 0.779213 valid_1's binary_logloss: 0.090062
[58]    training's auc: 0.810843    training's binary_logloss: 0.0792009    valid_1's auc: 0.778467 valid_1's binary_logloss: 0.0900665
[59]    training's auc: 0.8111  training's binary_logloss: 0.0791657    valid_1's auc: 0.778908 valid_1's binary_logloss: 0.0900436
[60]    training's auc: 0.811013    training's binary_logloss: 0.0791521    valid_1's auc: 0.779083 valid_1's binary_logloss: 0.0900454
[61]    training's auc: 0.81115 training's binary_logloss: 0.0791238    valid_1's auc: 0.779218 valid_1's binary_logloss: 0.0900303
[62]    training's auc: 0.81117 training's binary_logloss: 0.0791089    valid_1's auc: 0.779171 valid_1's binary_logloss: 0.0900258
[63]    training's auc: 0.811327    training's binary_logloss: 0.0790788    valid_1's auc: 0.7791   valid_1's binary_logloss: 0.0900106
[64]    training's auc: 0.811326    training's binary_logloss: 0.079069 valid_1's auc: 0.779232 valid_1's binary_logloss: 0.0900223
[65]    training's auc: 0.811449    training's binary_logloss: 0.0790371    valid_1's auc: 0.779741 valid_1's binary_logloss: 0.0900352
[66]    training's auc: 0.811437    training's binary_logloss: 0.0790276    valid_1's auc: 0.779734 valid_1's binary_logloss: 0.0900245
[67]    training's auc: 0.811536    training's binary_logloss: 0.0789994    valid_1's auc: 0.779581 valid_1's binary_logloss: 0.0900515
[68]    training's auc: 0.811479    training's binary_logloss: 0.0789909    valid_1's auc: 0.779614 valid_1's binary_logloss: 0.0900465
[69]    training's auc: 0.811596    training's binary_logloss: 0.0789654    valid_1's auc: 0.779766 valid_1's binary_logloss: 0.0900401
[70]    training's auc: 0.811633    training's binary_logloss: 0.078961 valid_1's auc: 0.779946 valid_1's binary_logloss: 0.0900291
[71]    training's auc: 0.811785    training's binary_logloss: 0.0789392    valid_1's auc: 0.780212 valid_1's binary_logloss: 0.0900128
[72]    training's auc: 0.811745    training's binary_logloss: 0.0789321    valid_1's auc: 0.780268 valid_1's binary_logloss: 0.0900058
[73]    training's auc: 0.811867    training's binary_logloss: 0.0789131    valid_1's auc: 0.780366 valid_1's binary_logloss: 0.0899873
[74]    training's auc: 0.81177 training's binary_logloss: 0.0789059    valid_1's auc: 0.780388 valid_1's binary_logloss: 0.0899878
[75]    training's auc: 0.811873    training's binary_logloss: 0.0788864    valid_1's auc: 0.78033  valid_1's binary_logloss: 0.0900109
[76]    training's auc: 0.811944    training's binary_logloss: 0.0788774    valid_1's auc: 0.780221 valid_1's binary_logloss: 0.0900063
[77]    training's auc: 0.812065    training's binary_logloss: 0.0788603    valid_1's auc: 0.780391 valid_1's binary_logloss: 0.0899994
[78]    training's auc: 0.812154    training's binary_logloss: 0.0788576    valid_1's auc: 0.780332 valid_1's binary_logloss: 0.0900096
[79]    training's auc: 0.812294    training's binary_logloss: 0.0788368    valid_1's auc: 0.780343 valid_1's binary_logloss: 0.0900513
[80]    training's auc: 0.812266    training's binary_logloss: 0.0788331    valid_1's auc: 0.780413 valid_1's binary_logloss: 0.0900495
[81]    training's auc: 0.812309    training's binary_logloss: 0.0788191    valid_1's auc: 0.780574 valid_1's binary_logloss: 0.0900269
[82]    training's auc: 0.812403    training's binary_logloss: 0.0788113    valid_1's auc: 0.780618 valid_1's binary_logloss: 0.0900231
[83]    training's auc: 0.812473    training's binary_logloss: 0.0787966    valid_1's auc: 0.780246 valid_1's binary_logloss: 0.0900257
[84]    training's auc: 0.812372    training's binary_logloss: 0.0787897    valid_1's auc: 0.780686 valid_1's binary_logloss: 0.0900233
[85]    training's auc: 0.812404    training's binary_logloss: 0.0787757    valid_1's auc: 0.780809 valid_1's binary_logloss: 0.0900316
[86]    training's auc: 0.812445    training's binary_logloss: 0.0787704    valid_1's auc: 0.780236 valid_1's binary_logloss: 0.0900217
[87]    training's auc: 0.812461    training's binary_logloss: 0.0787592    valid_1's auc: 0.78038  valid_1's binary_logloss: 0.0900165
[88]    training's auc: 0.812455    training's binary_logloss: 0.0787545    valid_1's auc: 0.780422 valid_1's binary_logloss: 0.090014
[89]    training's auc: 0.812429    training's binary_logloss: 0.0787446    valid_1's auc: 0.780449 valid_1's binary_logloss: 0.0900218
[90]    training's auc: 0.812471    training's binary_logloss: 0.078741 valid_1's auc: 0.780441 valid_1's binary_logloss: 0.0900193
[91]    training's auc: 0.812505    training's binary_logloss: 0.0787315    valid_1's auc: 0.780464 valid_1's binary_logloss: 0.0900318
[92]    training's auc: 0.812453    training's binary_logloss: 0.0787265    valid_1's auc: 0.780312 valid_1's binary_logloss: 0.0900344
[93]    training's auc: 0.812515    training's binary_logloss: 0.0787126    valid_1's auc: 0.780252 valid_1's binary_logloss: 0.0900471
[94]    training's auc: 0.812504    training's binary_logloss: 0.0787088    valid_1's auc: 0.780297 valid_1's binary_logloss: 0.0900514
[95]    training's auc: 0.812523    training's binary_logloss: 0.0786988    valid_1's auc: 0.780321 valid_1's binary_logloss: 0.0900479
[96]    training's auc: 0.812497    training's binary_logloss: 0.0786947    valid_1's auc: 0.780363 valid_1's binary_logloss: 0.0900411
[97]    training's auc: 0.812553    training's binary_logloss: 0.0786835    valid_1's auc: 0.780308 valid_1's binary_logloss: 0.0900571
[98]    training's auc: 0.812572    training's binary_logloss: 0.0786808    valid_1's auc: 0.780397 valid_1's binary_logloss: 0.0900602
[99]    training's auc: 0.812617    training's binary_logloss: 0.0786723    valid_1's auc: 0.780379 valid_1's binary_logloss: 0.0900683
[100]   training's auc: 0.81259 training's binary_logloss: 0.0786651    valid_1's auc: 0.780636 valid_1's binary_logloss: 0.0900653
[101]   training's auc: 0.81264 training's binary_logloss: 0.0786558    valid_1's auc: 0.780761 valid_1's binary_logloss: 0.0900662
[102]   training's auc: 0.812602    training's binary_logloss: 0.0786516    valid_1's auc: 0.780722 valid_1's binary_logloss: 0.09007
[103]   training's auc: 0.812648    training's binary_logloss: 0.0786414    valid_1's auc: 0.780776 valid_1's binary_logloss: 0.0900735
[104]   training's auc: 0.812667    training's binary_logloss: 0.0786374    valid_1's auc: 0.780811 valid_1's binary_logloss: 0.0900674
[105]   training's auc: 0.812732    training's binary_logloss: 0.0786315    valid_1's auc: 0.780832 valid_1's binary_logloss: 0.0900778
[106]   training's auc: 0.812718    training's binary_logloss: 0.0786293    valid_1's auc: 0.78075  valid_1's binary_logloss: 0.090078
[107]   training's auc: 0.812768    training's binary_logloss: 0.0786234    valid_1's auc: 0.780745 valid_1's binary_logloss: 0.0900803
[108]   training's auc: 0.812723    training's binary_logloss: 0.07862  valid_1's auc: 0.780829 valid_1's binary_logloss: 0.0900856
[109]   training's auc: 0.812755    training's binary_logloss: 0.0786136    valid_1's auc: 0.780775 valid_1's binary_logloss: 0.0900877
[110]   training's auc: 0.812742    training's binary_logloss: 0.0786126    valid_1's auc: 0.780712 valid_1's binary_logloss: 0.0900845
[111]   training's auc: 0.812729    training's binary_logloss: 0.0786033    valid_1's auc: 0.780761 valid_1's binary_logloss: 0.0901134
[112]   training's auc: 0.812837    training's binary_logloss: 0.0785951    valid_1's auc: 0.780808 valid_1's binary_logloss: 0.0900995
[113]   training's auc: 0.812832    training's binary_logloss: 0.0785882    valid_1's auc: 0.780903 valid_1's binary_logloss: 0.0900979
[114]   training's auc: 0.812831    training's binary_logloss: 0.0785864    valid_1's auc: 0.780839 valid_1's binary_logloss: 0.0900861
[115]   training's auc: 0.812845    training's binary_logloss: 0.0785775    valid_1's auc: 0.780897 valid_1's binary_logloss: 0.0900934
[116]   training's auc: 0.81284 training's binary_logloss: 0.0785754    valid_1's auc: 0.780876 valid_1's binary_logloss: 0.0900937
[117]   training's auc: 0.812856    training's binary_logloss: 0.078571 valid_1's auc: 0.780955 valid_1's binary_logloss: 0.0900814
[118]   training's auc: 0.812851    training's binary_logloss: 0.0785666    valid_1's auc: 0.780996 valid_1's binary_logloss: 0.0900797
[119]   training's auc: 0.81277 training's binary_logloss: 0.0785601    valid_1's auc: 0.780963 valid_1's binary_logloss: 0.0900779
[120]   training's auc: 0.812778    training's binary_logloss: 0.078558 valid_1's auc: 0.780911 valid_1's binary_logloss: 0.0900911
[121]   training's auc: 0.812798    training's binary_logloss: 0.0785529    valid_1's auc: 0.780961 valid_1's binary_logloss: 0.090077
[122]   training's auc: 0.81279 training's binary_logloss: 0.0785459    valid_1's auc: 0.78113  valid_1's binary_logloss: 0.0900695
[123]   training's auc: 0.812796    training's binary_logloss: 0.0785429    valid_1's auc: 0.781147 valid_1's binary_logloss: 0.0900673
[124]   training's auc: 0.812858    training's binary_logloss: 0.0785361    valid_1's auc: 0.780897 valid_1's binary_logloss: 0.0900794
[125]   training's auc: 0.812844    training's binary_logloss: 0.0785307    valid_1's auc: 0.780882 valid_1's binary_logloss: 0.0900767
[126]   training's auc: 0.812903    training's binary_logloss: 0.0785301    valid_1's auc: 0.780944 valid_1's binary_logloss: 0.0900859
[127]   training's auc: 0.812944    training's binary_logloss: 0.0785268    valid_1's auc: 0.780996 valid_1's binary_logloss: 0.0900713
[128]   training's auc: 0.812963    training's binary_logloss: 0.0785244    valid_1's auc: 0.78103  valid_1's binary_logloss: 0.0900753
[129]   training's auc: 0.812938    training's binary_logloss: 0.078522 valid_1's auc: 0.781035 valid_1's binary_logloss: 0.0900724
[130]   training's auc: 0.812876    training's binary_logloss: 0.07852  valid_1's auc: 0.781049 valid_1's binary_logloss: 0.0900627
[131]   training's auc: 0.812863    training's binary_logloss: 0.0785178    valid_1's auc: 0.780816 valid_1's binary_logloss: 0.090063
[132]   training's auc: 0.812864    training's binary_logloss: 0.0785163    valid_1's auc: 0.780828 valid_1's binary_logloss: 0.0900533
[133]   training's auc: 0.812929    training's binary_logloss: 0.0785118    valid_1's auc: 0.780818 valid_1's binary_logloss: 0.0900774
[134]   training's auc: 0.813009    training's binary_logloss: 0.0785086    valid_1's auc: 0.781097 valid_1's binary_logloss: 0.0900723
[135]   training's auc: 0.813014    training's binary_logloss: 0.0785045    valid_1's auc: 0.781128 valid_1's binary_logloss: 0.0900665
[136]   training's auc: 0.812995    training's binary_logloss: 0.0785039    valid_1's auc: 0.781106 valid_1's binary_logloss: 0.0900689
[137]   training's auc: 0.813008    training's binary_logloss: 0.0785006    valid_1's auc: 0.781143 valid_1's binary_logloss: 0.0900677
[138]   training's auc: 0.813005    training's binary_logloss: 0.0784957    valid_1's auc: 0.780823 valid_1's binary_logloss: 0.0900726
[139]   training's auc: 0.813013    training's binary_logloss: 0.0784936    valid_1's auc: 0.780812 valid_1's binary_logloss: 0.0900736
[140]   training's auc: 0.812997    training's binary_logloss: 0.0784911    valid_1's auc: 0.780735 valid_1's binary_logloss: 0.0900827
[141]   training's auc: 0.812913    training's binary_logloss: 0.0784877    valid_1's auc: 0.780716 valid_1's binary_logloss: 0.0900888
[142]   training's auc: 0.812945    training's binary_logloss: 0.0784862    valid_1's auc: 0.780709 valid_1's binary_logloss: 0.0900787
[143]   training's auc: 0.812897    training's binary_logloss: 0.0784826    valid_1's auc: 0.780662 valid_1's binary_logloss: 0.090093
[144]   training's auc: 0.812869    training's binary_logloss: 0.0784802    valid_1's auc: 0.780654 valid_1's binary_logloss: 0.0901036
[145]   training's auc: 0.812826    training's binary_logloss: 0.0784779    valid_1's auc: 0.780676 valid_1's binary_logloss: 0.0901069
[146]   training's auc: 0.812835    training's binary_logloss: 0.0784763    valid_1's auc: 0.780666 valid_1's binary_logloss: 0.0900979
[147]   training's auc: 0.812833    training's binary_logloss: 0.0784755    valid_1's auc: 0.780662 valid_1's binary_logloss: 0.0900873
[148]   training's auc: 0.812756    training's binary_logloss: 0.0784742    valid_1's auc: 0.780615 valid_1's binary_logloss: 0.0900852
[149]   training's auc: 0.812782    training's binary_logloss: 0.0784723    valid_1's auc: 0.780662 valid_1's binary_logloss: 0.0900806
[150]   training's auc: 0.812803    training's binary_logloss: 0.07847  valid_1's auc: 0.78073  valid_1's binary_logloss: 0.0900796
[151]   training's auc: 0.812806    training's binary_logloss: 0.0784677    valid_1's auc: 0.780802 valid_1's binary_logloss: 0.0900699
[152]   training's auc: 0.812803    training's binary_logloss: 0.0784665    valid_1's auc: 0.780772 valid_1's binary_logloss: 0.0900722
[153]   training's auc: 0.812779    training's binary_logloss: 0.0784648    valid_1's auc: 0.780778 valid_1's binary_logloss: 0.0900689
[154]   training's auc: 0.81282 training's binary_logloss: 0.07846  valid_1's auc: 0.780537 valid_1's binary_logloss: 0.0900787
[155]   training's auc: 0.812817    training's binary_logloss: 0.0784579    valid_1's auc: 0.780572 valid_1's binary_logloss: 0.0900729
[156]   training's auc: 0.812844    training's binary_logloss: 0.0784568    valid_1's auc: 0.780582 valid_1's binary_logloss: 0.090063
[157]   training's auc: 0.81285 training's binary_logloss: 0.078456 valid_1's auc: 0.780546 valid_1's binary_logloss: 0.0900826
[158]   training's auc: 0.812861    training's binary_logloss: 0.0784499    valid_1's auc: 0.780467 valid_1's binary_logloss: 0.0900775
[159]   training's auc: 0.812793    training's binary_logloss: 0.0784475    valid_1's auc: 0.780521 valid_1's binary_logloss: 0.09009
[160]   training's auc: 0.812859    training's binary_logloss: 0.0784436    valid_1's auc: 0.780489 valid_1's binary_logloss: 0.0900845
[161]   training's auc: 0.812846    training's binary_logloss: 0.078442 valid_1's auc: 0.780548 valid_1's binary_logloss: 0.0900847
[162]   training's auc: 0.812833    training's binary_logloss: 0.078441 valid_1's auc: 0.780561 valid_1's binary_logloss: 0.0900856
[163]   training's auc: 0.812783    training's binary_logloss: 0.0784386    valid_1's auc: 0.780523 valid_1's binary_logloss: 0.0900987
[164]   training's auc: 0.812873    training's binary_logloss: 0.0784353    valid_1's auc: 0.780579 valid_1's binary_logloss: 0.0900967
[165]   training's auc: 0.812865    training's binary_logloss: 0.0784342    valid_1's auc: 0.780603 valid_1's binary_logloss: 0.0901074
[166]   training's auc: 0.812847    training's binary_logloss: 0.0784317    valid_1's auc: 0.780571 valid_1's binary_logloss: 0.0900896
[167]   training's auc: 0.812823    training's binary_logloss: 0.0784293    valid_1's auc: 0.780457 valid_1's binary_logloss: 0.0900842
[168]   training's auc: 0.812859    training's binary_logloss: 0.0784281    valid_1's auc: 0.780467 valid_1's binary_logloss: 0.0900965
[169]   training's auc: 0.812886    training's binary_logloss: 0.0784263    valid_1's auc: 0.780444 valid_1's binary_logloss: 0.0901021
[170]   training's auc: 0.812915    training's binary_logloss: 0.0784253    valid_1's auc: 0.780471 valid_1's binary_logloss: 0.0901108
[171]   training's auc: 0.812892    training's binary_logloss: 0.0784248    valid_1's auc: 0.780492 valid_1's binary_logloss: 0.0901005
[172]   training's auc: 0.812906    training's binary_logloss: 0.0784215    valid_1's auc: 0.780465 valid_1's binary_logloss: 0.0900966
[173]   training's auc: 0.812882    training's binary_logloss: 0.0784194    valid_1's auc: 0.780487 valid_1's binary_logloss: 0.0900905
Early stopping, best iteration is:
[73]    training's auc: 0.811867    training's binary_logloss: 0.0789131    valid_1's auc: 0.780366 valid_1's binary_logloss: 0.0899873
4
train_ks :  0.4790454817217402
evl_ks :  0.4361379178117211



<Figure size 640x480 with 1 Axes>

LightGBM其实效果确实是比LR要好的,但是我们LR也可以逼近这个效果,下节课我们会具体来做。

# ['person_info','finance_info','credit_info','act_info']
# 算分数onekey 
def score(xbeta):
    """Map a predicted bad-probability `xbeta` (0 < xbeta < 1) to a score.

    Intended formula (per the original author's note "odds of good vs bad"):
        score = 1000 - 500 * log2((1 - p) / p)
    i.e. the good/bad odds ratio goes INSIDE the log.  The original code
    computed `math.log2(1 - xbeta) / xbeta`, a misplaced parenthesis; both
    forms are monotonic in p (so KS is unaffected), but only the fixed one
    matches the stated log-odds scorecard transform.

    Returns a float; higher score = higher bad probability here, matching
    the descending-score sort used by the report below.
    """
    # good-probability / bad-probability, inside the log as intended
    score = 1000 - 500 * math.log2((1 - xbeta) / xbeta)
    return score
# Score the out-of-time sample: probability -> scorecard score, then verify
# KS on the score (a monotonic transform leaves the ROC/KS unchanged).
evl['xbeta'] = model.predict_proba(evl_x)[:, 1]
evl['score'] = evl['xbeta'].map(score)
fpr_lr, tpr_lr, _ = roc_curve(evl_y, evl['score'])
evl_ks = abs(fpr_lr - tpr_lr).max()
print('val_ks : ',evl_ks)
val_ks :  0.4361379178117211
# Build a 20-bin gains/KS report on the out-of-time sample:
# sort by score descending (riskiest first), cut into equal-frequency bins,
# and track per-bin bad rate plus cumulative bad capture and KS.
bins = 20
Y_predict = evl['score']
Y = evl_y
nrows = Y.shape[0]
# Pair each score with its label via zip (robust to any index, unlike the
# original positional Series[i] lookups), then sort highest score first.
ks_lis = sorted(zip(Y_predict, Y), key=lambda x: x[0], reverse=True)
bin_num = int(nrows/bins+1)
# Total bad/good counts over the whole sample (labels are 0/1).
bad = sum([1 for (p, y) in ks_lis if y > 0.5])
good = sum([1 for (p, y) in ks_lis if y <= 0.5])
bad_cnt, good_cnt = 0, 0
KS = []
BAD = []
GOOD = []
BAD_CNT = []
GOOD_CNT = []
BAD_PCTG = []
BADRATE = []
for j in range(bins):
    ds = ks_lis[j*bin_num: min((j+1)*bin_num, nrows)]
    bad1 = sum([1 for (p, y) in ds if y > 0.5])
    good1 = sum([1 for (p, y) in ds if y <= 0.5])
    bad_cnt += bad1
    good_cnt += good1
    # Cumulative share of all bad accounts captured so far
    # (`bad` == sum(evl_y) for 0/1 labels; the original mixed the two).
    bad_pctg = round(bad_cnt/bad, 3)
    # Bad rate within this bin alone.
    badrate = round(bad1/(bad1+good1), 3)
    # KS = gap between cumulative bad share and cumulative good share.
    ks = round(math.fabs((bad_cnt / bad) - (good_cnt / good)), 3)
    KS.append(ks)
    BAD.append(bad1)
    GOOD.append(good1)
    BAD_CNT.append(bad_cnt)
    GOOD_CNT.append(good_cnt)
    BAD_PCTG.append(bad_pctg)
    BADRATE.append(badrate)
# Assemble the report dict once, after the loop (the original re-assigned
# every key on every iteration for no effect).
dct_report = {
    'KS': KS,
    'BAD': BAD,
    'GOOD': GOOD,
    'BAD_CNT': BAD_CNT,
    'GOOD_CNT': GOOD_CNT,
    'BAD_PCTG': BAD_PCTG,
    'BADRATE': BADRATE,
}
val_repot = pd.DataFrame(dct_report)
val_repot
KS BAD GOOD BAD_CNT GOOD_CNT BAD_PCTG BADRATE
0 0.229 90 709 90 709 0.274 0.113
1 0.271 30 769 120 1478 0.366 0.038
2 0.348 41 758 161 2236 0.491 0.051
3 0.381 27 772 188 3008 0.573 0.034
4 0.398 22 777 210 3785 0.640 0.028
5 0.419 23 776 233 4561 0.710 0.029
6 0.424 18 781 251 5342 0.765 0.023
7 0.407 11 788 262 6130 0.799 0.014
8 0.387 10 789 272 6919 0.829 0.013
9 0.380 14 785 286 7704 0.872 0.018
10 0.357 9 790 295 8494 0.899 0.011
11 0.340 11 788 306 9282 0.933 0.014
12 0.301 4 795 310 10077 0.945 0.005
13 0.262 4 795 314 10872 0.957 0.005
14 0.221 3 796 317 11668 0.966 0.004
15 0.182 4 795 321 12463 0.979 0.005
16 0.137 2 797 323 13260 0.985 0.003
17 0.092 2 797 325 14057 0.991 0.003
18 0.041 0 799 325 14856 0.991 0.000
19 0.000 3 791 328 15647 1.000 0.004
from pyecharts.charts import *
from pyecharts import options as opts
# pylab star-import brings `mpl` and `np` into scope for the settings below.
from pylab import *
# Use a CJK-capable font so Chinese labels render correctly in matplotlib.
mpl.rcParams['font.sans-serif'] = ['SimHei']
np.set_printoptions(suppress=True)
pd.set_option('display.unicode.ambiguous_as_wide', True)
pd.set_option('display.unicode.east_asian_width', True)
# Dual-axis pyecharts line chart over the 20 score bins in `val_repot`:
# left axis = per-bin bad rate (BADRATE), right axis = KS per bin.
line = (

    Line()
    .add_xaxis(list(val_repot.index))
    .add_yaxis(
        "分组坏人占比",
        list(val_repot.BADRATE),
        yaxis_index=0,  # default (left) y-axis
        color="red",
    )
    .set_global_opts(
        title_opts=opts.TitleOpts(title="行为评分卡模型表现"),
    )
    # Secondary (right) y-axis, fixed to [0, 0.5] for the KS series.
    # NOTE(review): axis is named "累计坏人占比" (cumulative bad share) but the
    # KS series is plotted on it below — confirm the intended label.
    .extend_axis(
        yaxis=opts.AxisOpts(
            name="累计坏人占比",
            type_="value",
            min_=0,
            max_=0.5,
            position="right",
            axisline_opts=opts.AxisLineOpts(
                linestyle_opts=opts.LineStyleOpts(color="red")
            ),
            axislabel_opts=opts.LabelOpts(formatter="{value}"),
        )

    )
    .add_xaxis(list(val_repot.index))
    .add_yaxis(
        "KS",
        list(val_repot['KS']),
        yaxis_index=1,  # plot on the extended (right) axis
        color="blue",
        label_opts=opts.LabelOpts(is_show=False),
    )
)
# Render inline in a Jupyter notebook.
line.render_notebook()
<div id="4443c6c5e7f04a84b2c462473c25c6a0" style="width:900px; height:500px;"></div>

标签:binary,auc,LightGBM,评分,train,training,logloss,valid
来源: https://www.cnblogs.com/chenxiangzhen/p/10963115.html