0) Load the data, inspect it, and normalize
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Dropout
from tensorflow.keras.optimizers import Adam
from tensorflow.keras import regularizers
# ๋ฐ์ดํฐ ๋ถ๋ฌ์ค๊ธฐ
from tensorflow.keras.datasets import cifar100
# Load CIFAR-100 (50k train / 10k test, 32x32x3 uint8 images, integer labels).
(X_train, y_train), (X_test, y_test) = cifar100.load_data()

# Inspect shape and targets. In a plain script a bare expression is a no-op
# (the original was notebook-style), so print the values explicitly.
print("X_train shape:", X_train.shape)
# 100 distinct integer labels -> multi-class classification problem.
print("classes:", np.unique(y_train))

# Normalize pixel intensities: uint8 [0, 255] -> float [0, 1].
X_train = X_train / 255.
X_test = X_test / 255.
1) Baseline model
# Baseline: a fully-connected classifier over flattened 32x32x3 images.
model = Sequential([
    Flatten(input_shape=(32, 32, 3)),
    Dense(256, activation='relu'),
    Dense(128, activation='relu'),
    Dense(100, activation='softmax'),
])
# Integer labels -> sparse categorical cross-entropy.
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)
# Hold out 20% of the training set for validation during fitting.
results = model.fit(X_train, y_train, epochs=30, batch_size=100, verbose=1, validation_split=0.2)
test_loss, test_acc = model.evaluate(X_test, y_test, verbose=2)
2) Overfitting-prevention model (weight regularization, dropout, early stopping)
# Regularized variant of the baseline: L2 weight penalty and L1 activity
# penalty on each hidden layer, followed by dropout.
model2 = Sequential([
    Flatten(input_shape=(32, 32, 3)),
    Dense(
        256,
        activation='relu',
        kernel_regularizer=regularizers.L2(0.00001),
        activity_regularizer=regularizers.L1(0.00001),
    ),
    Dropout(0.2),
    Dense(
        128,
        activation='relu',
        kernel_regularizer=regularizers.L2(0.00001),
        activity_regularizer=regularizers.L1(0.00001),
    ),
    Dropout(0.2),
    Dense(100, activation='softmax'),
])
model2.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)
# Early stopping + best-weights checkpointing.
# Consistency fix: use tf.keras (already imported at the top of the file)
# rather than the original's stray mid-file `import keras`, so the callbacks
# come from the same Keras implementation as the models above.
early_stop = tf.keras.callbacks.EarlyStopping(
    monitor='val_loss',  # stop when validation loss stops improving
    min_delta=0,
    patience=10,         # tolerate 10 stagnant epochs before stopping
    verbose=1,
)
save_best = tf.keras.callbacks.ModelCheckpoint(
    filepath="best.hdf5",
    monitor='val_loss',
    verbose=1,
    save_best_only=True,     # keep only the best epoch's weights
    save_weights_only=True,  # save weights only, not the full model
    mode='auto',
    save_freq='epoch',
)
results2 = model2.fit(X_train, y_train, epochs=30, batch_size=100, validation_split=0.2, verbose=1, callbacks=[early_stop, save_best])
# Evaluate the last-epoch weights. (Author's observed result:
# loss 3.5875, accuracy 0.1580.)
test_loss, test_acc = model2.evaluate(X_test, y_test, verbose=2)
# Reload the best checkpoint and evaluate again for comparison.
model2.load_weights("best.hdf5")
test_loss, test_acc = model2.evaluate(X_test, y_test, verbose=2)
3) Hyperparameter tuning with GridSearchCV (applied to the baseline model)
from sklearn.model_selection import GridSearchCV
from tensorflow.keras.wrappers.scikit_learn import KerasClassifier
def build_model3():
    """Build and compile the baseline MLP for use with KerasClassifier."""
    net = Sequential([
        Flatten(input_shape=(32, 32, 3)),
        Dense(256, activation='relu'),
        Dense(128, activation='relu'),
        Dense(100, activation='softmax'),
    ])
    net.compile(
        optimizer='adam',
        loss='sparse_categorical_crossentropy',
        metrics=['accuracy'],
    )
    return net
# NOTE(review): tensorflow.keras.wrappers.scikit_learn is deprecated and
# removed in recent TF releases (the scikeras package is the usual
# replacement) — confirm against the installed TF version.
model3 = KerasClassifier(build_fn=build_model3, verbose=1)

# Hyperparameters to tune: batch size and number of epochs.
param_grid = {
    'batch_size': [16, 32, 64, 128],
    'epochs': [10, 20, 30],
}
model3_grid = GridSearchCV(estimator=model3, param_grid=param_grid, n_jobs=-1)
grid_results = model3_grid.fit(X_train, y_train)

# Mean accuracy and standard deviation for every grid combination.
cv = grid_results.cv_results_
means = cv['mean_test_score']
stds = cv['std_test_score']
params = cv['params']
for mean, stdev, param in zip(means, stds, params):
    print(f"Means: {mean}, Stdev: {stdev} with: {param}")

# Best score and the parameter combination that produced it.
print(f"Best : {grid_results.best_score_}")
print(f"using_params : {grid_results.best_params_}")
Conclusion
- Applying overfitting prevention indiscriminately can actually hurt training efficiency.
- Next step: look into tuning parameters beyond the hyperparameters (batch size, epochs) set when fitting the model.
None of these beat the baseline model, so it is not a great example, but it was written while studying — please read it for the overall workflow.
Thanks for reading. :)
'๐ฟ Data > ์ด๋ชจ์ ๋ชจ' ์นดํ ๊ณ ๋ฆฌ์ ๋ค๋ฅธ ๊ธ
[๋ฅ๋ฌ๋, NLP] ๋ถ์ฉ์ด, ์ถ์ถ, BoW/TF-IDF (0) | 2022.03.06 |
---|---|
[๋ฅ๋ฌ๋]ํ์ดํผ ํ๋ผ๋ฏธํฐ ํ๋(sklearn์ RandomizedSearchCV, keras_tuner์ RandomSearch) (0) | 2022.03.01 |
[๋ฅ๋ฌ๋]๊ฐ๋จ ์ ๊ฒฝ๋ง ๋ฐ ๋จธ์ ๋ฌ๋ ๋ชจ๋ธ๋ง, ์ฑ๋ฅ ๋น๊ต (0) | 2022.02.26 |
[๋ฅ๋ฌ๋]์ตํฐ๋ง์ด์ (Optimizer) (0) | 2022.02.24 |
[๋ฅ๋ฌ๋]์์ค ํจ์ (0) | 2022.02.23 |