jangjusung / jusung-python


MNIST_conv2d #34

Open jangjusung opened 2 years ago

jangjusung commented 2 years ago

```python
# -*- coding: utf-8 -*-

from keras.datasets import mnist
from keras.utils import np_utils
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping
import matplotlib.pyplot as plt
import numpy
import os
import tensorflow as tf

# Note: tf.keras.utils.to_categorical(Y) or pandas.get_dummies can also be used
# for one-hot encoding instead of np_utils.to_categorical.
```

```python
# Set the random seed
seed = 3
numpy.random.seed(seed)
tf.random.set_seed(seed)

# Load the data
(X_train, Y_train), (X_test, Y_test) = mnist.load_data()
X_train = X_train.reshape(X_train.shape[0], 28, 28, 1).astype('float32') / 255
X_test = X_test.reshape(X_test.shape[0], 28, 28, 1).astype('float32') / 255
Y_train = np_utils.to_categorical(Y_train)   # one-hot encode the labels
Y_test = np_utils.to_categorical(Y_test)
# Equivalent: Y_train = tf.keras.utils.to_categorical(Y_train)
```
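As a side note on the one-hot encoding alternatives mentioned above, here is a minimal sketch (using a small made-up label array rather than the MNIST labels) showing that tf.keras.utils.to_categorical and pandas.get_dummies produce the same encoding:

```python
import numpy as np
import pandas as pd
import tensorflow as tf

y = np.array([0, 2, 1, 2])  # hypothetical integer labels, 3 classes
onehot_tf = tf.keras.utils.to_categorical(y, num_classes=3)     # float32 array, shape (4, 3)
onehot_pd = pd.get_dummies(y).to_numpy().astype('float32')      # one column per class value
print(np.array_equal(onehot_tf, onehot_pd))  # True for these labels
```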

```python
# Configure the convolutional neural network
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3), input_shape=(28, 28, 1), activation='relu'))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(10, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
```

```python
# Model optimization settings
MODEL_DIR = './model/'
if not os.path.exists(MODEL_DIR):
    os.mkdir(MODEL_DIR)

modelpath = "./model/{epoch:02d}-{val_loss:.4f}.hdf5"
checkpointer = ModelCheckpoint(filepath=modelpath, monitor='val_loss',
                               verbose=1, save_best_only=True)
early_stopping_callback = EarlyStopping(monitor='val_loss', patience=10)
```

```python
# Train the model
history = model.fit(X_train, Y_train, validation_data=(X_test, Y_test),
                    epochs=30, batch_size=200, verbose=0,
                    callbacks=[early_stopping_callback, checkpointer])

# Print the test accuracy
print("\n Test Accuracy: %.4f" % (model.evaluate(X_test, Y_test)[1]))
```

```python
# Validation (test set) loss
y_vloss = history.history['val_loss']

# Training set loss
y_loss = history.history['loss']

# Plot both losses
x_len = numpy.arange(len(y_loss))
plt.plot(x_len, y_vloss, marker='.', c="red", label='Testset_loss')
plt.plot(x_len, y_loss, marker='.', c="blue", label='Trainset_loss')

# Add a grid and axis labels
plt.legend(loc='upper right')
plt.grid()
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()
```
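Since the model is compiled with the accuracy metric, the same History object can also be used to plot training versus validation accuracy. A minimal sketch, assuming a recent TF/Keras version where the history keys are 'accuracy' and 'val_accuracy' (older releases used 'acc'/'val_acc'):

```python
# Plot training vs. validation accuracy from the same History object
y_acc = history.history['accuracy']
y_vacc = history.history['val_accuracy']
x_len = numpy.arange(len(y_acc))
plt.plot(x_len, y_vacc, marker='.', c="red", label='Testset_acc')
plt.plot(x_len, y_acc, marker='.', c="blue", label='Trainset_acc')
plt.legend(loc='upper right')
plt.grid()
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.show()
```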

The same pipeline is then repeated for CIFAR-10 (the CIFAR-100 load on the first line is immediately overwritten by the CIFAR-10 load, which is what the 10-way softmax output expects):

```python
(train_input, train_target), (test_input, test_target) = tf.keras.datasets.cifar100.load_data()
# Overwritten below: the model has 10 outputs, so CIFAR-10 is the dataset actually used
(train_input, train_target), (test_input, test_target) = tf.keras.datasets.cifar10.load_data()

train_input = train_input.reshape(train_input.shape[0], 32, 32, 3).astype('float32') / 255
test_input = test_input.reshape(test_input.shape[0], 32, 32, 3).astype('float32') / 255
train_target = np_utils.to_categorical(train_target)
test_target = np_utils.to_categorical(test_target)

model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3), input_shape=(32, 32, 3), activation='relu'))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(10, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

MODEL_DIR = './model/'
if not os.path.exists(MODEL_DIR):
    os.mkdir(MODEL_DIR)

modelpath = "./model/{epoch:02d}-{val_loss:.4f}.hdf5"
checkpointer = ModelCheckpoint(filepath=modelpath, monitor='val_loss',
                               verbose=1, save_best_only=True)
early_stopping_callback = EarlyStopping(monitor='val_loss', patience=10)

# Train the model
history = model.fit(train_input, train_target, validation_data=(test_input, test_target),
                    epochs=10, batch_size=200, verbose=1,
                    callbacks=[early_stopping_callback, checkpointer])

# Print the test accuracy
print("\n Test Accuracy: %.4f" % (model.evaluate(test_input, test_target)[1]))

# Validation (test set) loss
y_vloss = history.history['val_loss']

# Training set loss
y_loss = history.history['loss']

# Plot both losses
x_len = numpy.arange(len(y_loss))
plt.plot(x_len, y_vloss, marker='.', c="red", label='Testset_loss')
plt.plot(x_len, y_loss, marker='.', c="blue", label='Trainset_loss')

# Add a grid and axis labels
plt.legend(loc='upper right')
plt.grid()
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()
```
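As a quick sanity check on the trained CIFAR-10 model, here is a minimal sketch (reusing the test_input / test_target variables above) that predicts a few test images and compares the predicted class index with the true label:

```python
import numpy as np

# Predict the first five test images and compare with the ground truth
probs = model.predict(test_input[:5])              # (5, 10) softmax outputs
pred_classes = np.argmax(probs, axis=1)
true_classes = np.argmax(test_target[:5], axis=1)  # labels were one-hot encoded above
for p, t in zip(pred_classes, true_classes):
    print("predicted: %d, actual: %d" % (p, t))
```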