import keras
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from sklearn.metrics import confusion_matrix
# Load the MNIST arrays from a local .npz file
with np.load('mnist.npz') as f:
    x_train, y_train = f['x_train'], f['y_train']
    x_test, y_test = f['x_test'], f['y_test']
# Add the single channel dimension expected by Conv2D: (N, 28, 28) -> (N, 28, 28, 1)
x_train = x_train.reshape(60000, 28, 28, 1)
x_test = x_test.reshape(10000, 28, 28, 1)
print(x_train.shape)
print(x_test.shape)
# A small CNN: two conv/pool blocks followed by a dense classifier over 10 digits
model = keras.models.Sequential()
model.add(keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))
model.add(keras.layers.MaxPooling2D((2, 2)))
model.add(keras.layers.Conv2D(64, (3, 3), activation='relu'))
model.add(keras.layers.MaxPooling2D((2, 2)))
model.add(keras.layers.Flatten())
model.add(keras.layers.Dense(64, activation='relu'))
model.add(keras.layers.Dense(10, activation='softmax'))
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.summary()
model.fit(x=x_train, y=y_train, batch_size=512, epochs=10, validation_data=(x_test, y_test))
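# (Optional sanity check, not part of the original script:) a minimal sketch of
# reading the test-set loss and accuracy directly. model.evaluate returns the
# loss followed by the metrics compiled above.
test_loss, test_acc = model.evaluate(x_test, y_test, verbose=0)
print(f'Test accuracy: {test_acc:.4f}')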
# model.predict_classes() has been removed from Keras; take the argmax of the
# softmax outputs instead to get the predicted digit for each test image.
y_pred = np.argmax(model.predict(x_test), axis=1)
print(y_pred)
con_mat = confusion_matrix(y_test, y_pred)
# Normalize each row (true class) so entries become per-class prediction rates
con_mat_norm = con_mat.astype('float') / con_mat.sum(axis=1)[:, np.newaxis]
con_mat_norm = np.around(con_mat_norm, decimals=2)
plt.figure(figsize=(8, 8))
sns.heatmap(con_mat_norm, annot=True, cmap='Blues')
plt.ylim(0, 10)  # keep all rows fully visible (works around a matplotlib/seaborn heatmap cropping issue)
plt.xlabel('Predicted labels')
plt.ylabel('True labels')
plt.show()
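# (Optional, an assumed extension:) the overall accuracy can also be recovered
# from the un-normalized confusion matrix, since correct predictions lie on the
# diagonal.
print('Accuracy from confusion matrix:', np.trace(con_mat) / con_mat.sum())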