keras - Dropout 剪枝操作的應用
1. 載入數據以及預處理
# Load the data and preprocess it.
import numpy as np
from keras.datasets import mnist
from keras.utils import np_utils
from keras.models import Sequential
# Import only the layers actually used instead of the original wildcard
# `from keras.layers import *`.
from keras.layers import Dense, Dropout
from keras.optimizers import SGD
import os
import tensorflow as tf

# Load the MNIST data (downloads on first use).
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Preprocess: flatten each (28, 28) image into a 784-vector so it can be
# fed to the first Dense layer, and scale pixel values to [0, 1].
# (60000, 28, 28) -> (60000, 784).  (The original comment said 600000 — typo.)
x_train = x_train.reshape(x_train.shape[0], -1) / 255.0
x_test = x_test.reshape(x_test.shape[0], -1) / 255.0

# One-hot encode the labels (10 digit classes).
y_train = np_utils.to_categorical(y_train, num_classes=10)
y_test = np_utils.to_categorical(y_test, num_classes=10)
2. 建立網絡並打印訓練結果
# Build the network: 784 -> 128 -> 128 -> 10.
model = Sequential()
model.add(Dense(units=128, input_dim=784, bias_initializer='one', activation='tanh'))
# Dropout randomly disables 40% of the units during training,
# which helps avoid overfitting.
model.add(Dropout(0.4))
model.add(Dense(units=128, bias_initializer='one', activation='tanh'))
model.add(Dropout(0.4))
model.add(Dense(units=10, bias_initializer='one', activation='softmax'))
# 編譯
# 自定義優化器
# Compile with a hand-configured SGD optimizer and cross-entropy loss.
sgd = SGD(lr=0.1)
model.compile(
    optimizer=sgd,
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)

# Train, holding out 20% of the training set for validation.
model.fit(x_train, y_train, batch_size=32, epochs=10, validation_split=0.2)

# Evaluate on the held-out test set.
test_loss, test_acc = model.evaluate(x_test, y_test)
print('\ntest loss', test_loss)
print('test acc', test_acc)
out:
Epoch 1/10
32/48000 [..............................] - ETA: 5:04 - loss: 2.7763 - acc: 0.1250
  576/48000 [..............................] - ETA: 21s - loss: 2.6202 - acc: 0.1354
......
......
Epoch 10/10
47744/48000 [============================>.] - ETA: 0s - loss: 0.1830 - acc: 0.9448
48000/48000 [==============================] - 3s 72us/step - loss: 0.1831 - acc: 0.9449 - val_loss: 0.1210 - val_acc: 0.9649
32/10000 [..............................] - ETA: 0s
1824/10000 [====>.........................] - ETA: 0s
3616/10000 [=========>....................] - ETA: 0s
5472/10000 [===============>..............] - ETA: 0s
7456/10000 [=====================>........] - ETA: 0s
9440/10000 [===========================>..] - ETA: 0s
10000/10000 [==============================] - 0s 27us/step
test loss 0.11740412595644593
test acc 0.9652