# 如何在python深度学习Keras中计算神经网络集成模型

## 平均模型权重

Polyak平均是指将优化算法在参数空间中沿优化轨迹访问到的多个点的模型参数取平均，从而得到一个更稳定的最终模型。

## 多类别分类问题

# Generate the 2d multi-class classification dataset and visualize it,
# coloring each of the three blob classes differently.
# (The original text generated the identical dataset twice; once suffices.)

# generate 2d classification dataset
X, y = make_blobs(n_samples=1000, centers=3, n_features=2, cluster_std=2, random_state=2)

# scatter plot for each class value
for class_value in range(3):
    # select indices of points with the class label
    row_ix = where(y == class_value)
    # scatter plot for points with a different color
    pyplot.scatter(X[row_ix, 0], X[row_ix, 1])

# show plot
pyplot.show()

# Prepare the train/test split (100 train, 1000 test) and define/compile/fit
# the MLP used throughout the tutorial.

# generate 2d classification dataset
X, y = make_blobs(n_samples=1100, centers=3, n_features=2, cluster_std=2, random_state=2)

# one hot encode output variable
y = to_categorical(y)

# split into train and test: first 100 samples train, remaining 1000 test
n_train = 100
trainX, testX = X[:n_train, :], X[n_train:, :]
trainy, testy = y[:n_train], y[n_train:]

# define model
# NOTE(review): the layer definitions were missing from the extracted text;
# restored to match the printed model summary (25-unit hidden layer = 75
# params, 3-unit softmax output = 78 params) — confirm against the original.
model = Sequential()
model.add(Dense(25, input_dim=2, activation='relu'))
model.add(Dense(3, activation='softmax'))
opt = SGD(lr=0.01, momentum=0.9)
model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])

# fit model, keeping the History object used to plot learning curves
history = model.fit(trainX, trainy, validation_data=(testX, testy), epochs=500, verbose=0)

# Plot the train/test accuracy learning curves recorded during fitting.
for series_key, curve_label in (('acc', 'train'), ('val_acc', 'test')):
    pyplot.plot(history.history[series_key], label=curve_label)
pyplot.legend()
pyplot.show()

Train: 0.860, Test: 0.812

## 将多个模型保存到文件

# Fit the model one epoch at a time, saving a snapshot of the model after
# each of the final 10 epochs (epochs 490..499) for later ensembling.

# fit model
n_epochs, n_save_after = 500, 490
for i in range(n_epochs):
    # fit model for a single epoch
    model.fit(trainX, trainy, epochs=1, verbose=0)
    # check if we should save the model
    if i >= n_save_after:
        model.save('model_' + str(i) + '.h5')

pip install h5py

## 具有平均模型权重的新模型

# Load saved model snapshots for epochs [n_start, n_end) back from disk.
# NOTE(review): the `def` header and the `load_model(...)` call were lost in
# extraction (the loop appended an undefined `model`); reconstructed here —
# `load_model` is keras.models.load_model. Confirm against the original text.
def load_all_models(n_start, n_end):
    all_models = list()
    for epoch in range(n_start, n_end):
        # define filename for this ensemble member
        filename = 'model_' + str(epoch) + '.h5'
        # load model from file
        model = load_model(filename)
        # add to list of members
        all_models.append(model)
    return all_models

_________________________________________________________________

Layer (type)                Output Shape              Param #

=================================================================

dense_1 (Dense)              (None, 25)                75

_________________________________________________________________

dense_2 (Dense)              (None, 3)                78

=================================================================

Total params: 153

Trainable params: 153

Non-trainable params: 0

_________________________________________________________________

## 使用平均模型权重集合进行预测

# evaluate a specific number of members in an ensemble

# evaluate a specific number of members in an ensemble
def evaluate_n_members(members, n_members, testX, testy):
    """Build an equally-weighted average-weight ensemble from the last
    `n_members` models and return its accuracy on the hold-out set."""
    # reverse loaded models so we build the ensemble with the last models first
    members = list(reversed(members))
    # select a subset of members
    subset = members[:n_members]
    # prepare an array of equal weights
    weights = [1.0 / n_members for i in range(1, n_members + 1)]
    # create a new model with the weighted average of all model weights
    model = model_weight_ensemble(subset, weights)
    # make predictions and evaluate accuracy
    _, test_acc = model.evaluate(testX, testy, verbose=0)
    return test_acc

# Compare each standalone snapshot against the average-weight ensemble built
# from an increasing number of the most recent snapshots, then plot both.

# reverse loaded models so we build the ensemble with the last models first
members = list(reversed(members))

# evaluate different numbers of ensembles on hold out set
single_scores, ensemble_scores = list(), list()
for i in range(1, len(members) + 1):
    # evaluate model with i members
    ensemble_score = evaluate_n_members(members, i, testX, testy)
    # evaluate the i'th model standalone
    _, single_score = members[i - 1].evaluate(testX, testy, verbose=0)
    # summarize this step
    print('> %d: single=%.3f, ensemble=%.3f' % (i, single_score, ensemble_score))
    ensemble_scores.append(ensemble_score)
    single_scores.append(single_score)

# plot score vs number of ensemble members
x_axis = [i for i in range(1, len(members) + 1)]
pyplot.plot(x_axis, single_scores, marker='o', linestyle='None')
pyplot.plot(x_axis, ensemble_scores, marker='o')
pyplot.show()

> 1: single=0.814, ensemble=0.814

> 2: single=0.814, ensemble=0.814

> 3: single=0.811, ensemble=0.813

> 4: single=0.805, ensemble=0.813

> 5: single=0.807, ensemble=0.811

> 6: single=0.805, ensemble=0.807

> 7: single=0.802, ensemble=0.809

> 8: single=0.805, ensemble=0.808

> 9: single=0.805, ensemble=0.808

> 10: single=0.810, ensemble=0.807

# prepare an array of linearly decreasing weights (most recent model first)
weights = [i / n_members for i in range(n_members, 0, -1)]

...

> 1: single=0.814, ensemble=0.814

> 2: single=0.814, ensemble=0.815

> 3: single=0.811, ensemble=0.814

> 4: single=0.805, ensemble=0.813

> 5: single=0.807, ensemble=0.813

> 6: single=0.805, ensemble=0.813

> 7: single=0.802, ensemble=0.811

> 8: single=0.805, ensemble=0.810

> 9: single=0.805, ensemble=0.809

> 10: single=0.810, ensemble=0.809

# prepare an array of exponentially decreasing weights; alpha controls the
# decay rate (larger alpha -> slower decay, more equal contribution)
alpha = 2.0
weights = [exp(-i / alpha) for i in range(1, n_members + 1)]

> 1: single=0.814, ensemble=0.814

> 2: single=0.814, ensemble=0.815

> 3: single=0.811, ensemble=0.814

> 4: single=0.805, ensemble=0.814

> 5: single=0.807, ensemble=0.813

> 6: single=0.805, ensemble=0.813

> 7: single=0.802, ensemble=0.813

> 8: single=0.805, ensemble=0.813

> 9: single=0.805, ensemble=0.813

> 10: single=0.810, ensemble=0.813