python – Why does Keras only run 10 epochs when I set it to 300?
I'm running a combination of sklearn and Keras with Theano as the backend. I'm using the following code –
import numpy as np
import pandas as pd
from pandas import Series, DataFrame
import keras
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras.constraints import maxnorm
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import SGD
from keras.wrappers.scikit_learn import KerasClassifier
from keras.constraints import maxnorm
from keras.utils.np_utils import to_categorical
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import StratifiedKFold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline
from sklearn.model_selection import train_test_split
from datetime import datetime
import time
from datetime import timedelta
from __future__ import division
seed = 7
np.random.seed(seed)
Y = data['Genre']
del data['Genre']
X = data
encoder = LabelEncoder()
encoder.fit(Y)
encoded_Y = encoder.transform(Y)
X = X.as_matrix().astype("float")
calls=[EarlyStopping(monitor='acc', patience=10), ModelCheckpoint('C:/Users/1383921/Documents/NNs/model', monitor='acc', save_best_only=True, mode='auto', period=1)]
def create_baseline():
    # create model
    model = Sequential()
    model.add(Dense(18, input_dim=9, init='normal', activation='relu'))
    model.add(Dense(9, init='normal', activation='relu'))
    model.add(Dense(12, init='normal', activation='softmax'))
    # Compile model
    sgd = SGD(lr=0.01, momentum=0.8, decay=0.0, nesterov=False)
    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
    return model
np.random.seed(seed)
estimators = []
estimators.append(('standardize', StandardScaler()))
estimators.append(('mlp', KerasClassifier(build_fn=create_baseline, nb_epoch=300, batch_size=16, verbose=2)))
pipeline = Pipeline(estimators)
kfold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)
results = cross_val_score(pipeline, X, encoded_Y, cv=kfold, fit_params={'mlp__callbacks':calls})
print("Baseline: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))
When I run the last part, the output I get is –
Epoch 1/10
...
Epoch 2/10
and so on.
It should say Epoch 1/300, and when I run the same thing in a different notebook it works fine.
What do you think is happening here? nb_epoch = 300 …
Solution:
Which Keras version is this? If it is greater than 2.0, then nb_epoch has been renamed to epochs; if you still pass nb_epoch, it is ignored and the epoch count falls back to the default of 10.
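As a minimal sketch of the fix (assuming Keras >= 2.0 and the same keras.wrappers.scikit_learn.KerasClassifier), only the keyword name changes; everything else in the pipeline stays as in the question:

# Keras >= 2.0: the fit argument is `epochs`. `nb_epoch` is no longer in fit()'s
# signature, so the wrapper filters it out and trains for the default 10 epochs.
estimators = []
estimators.append(('standardize', StandardScaler()))
estimators.append(('mlp', KerasClassifier(build_fn=create_baseline, epochs=300, batch_size=16, verbose=2)))
pipeline = Pipeline(estimators)
results = cross_val_score(pipeline, X, encoded_Y, cv=kfold, fit_params={'mlp__callbacks': calls})

On Keras 1.x the original nb_epoch=300 keyword is the correct one, which would explain why the same code ran for 300 epochs in the other notebook.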
Tags: python, neural-network, keras, data-science Source: https://codeday.me/bug/20190712/1443311.html