Installation

Requirements:
- Python 3.6
- TensorFlow 2.0 Beta

pip install git+https://github.com/keras-team/keras-tuner.git

Basic usage

Let's tune the hyperparameters of a single-layer dense neural network using random search.
First, define a model-building function. It takes an `hp` argument from which hyperparameters
can be sampled — e.g. an integer number of units drawn from the range 32–512 in steps of 32.
The function returns a compiled model.
from tensorflow import keras
from tensorflow.keras import layers
from kerastuner.tuners import RandomSearch
def build_model(hp):
    """Build and compile a single-hidden-layer dense classifier.

    Args:
        hp: keras-tuner ``HyperParameters`` object used to sample values.

    Returns:
        A compiled ``tf.keras`` model.
    """
    model = keras.Sequential()
    # hp.Int replaces the pre-release hp.Range API (renamed in keras-tuner 1.0).
    model.add(layers.Dense(units=hp.Int('units',
                                        min_value=32,
                                        max_value=512,
                                        step=32),
                           activation='relu'))
    # 10-way softmax head; integer class labels are expected (sparse loss below).
    model.add(layers.Dense(10, activation='softmax'))
    model.compile(
        optimizer=keras.optimizers.Adam(
            hp.Choice('learning_rate',
                      values=[1e-2, 1e-3, 1e-4])),
        loss='sparse_categorical_crossentropy',
        metrics=['accuracy'])
    return model
Next, instantiate a tuner. Available tuners include RandomSearch and Hyperband.

Note: running several executions per trial reduces run-to-run variance, so model performance
can be assessed more accurately. To get results faster, set executions_per_trial=1.
# Instantiate the tuner. (The original snippet had a comment wrapped onto a bare
# line, which was a syntax error; comments are repaired here.)
tuner = RandomSearch(
    build_model,               # model-building function
    objective='val_accuracy',  # metric the tuner optimizes
    max_trials=5,              # number of hyperparameter combinations to try
    executions_per_trial=3,    # models built & fit per trial (averages out noise)
    directory='my_dir',
    project_name='helloworld')
View a summary of the search space:
# Print the hyperparameter search space defined by the model-building function.
tuner.search_space_summary()
Now start the search for the best hyperparameter configuration. The call to `search`
has the same signature as `model.fit()`.
# Run the hyperparameter search; same call signature as model.fit().
# Assumes x, y, val_x, val_y are defined elsewhere in the tutorial — TODO confirm.
tuner.search(x, y,
epochs=5,
validation_data=(val_x, val_y))
What happens in `search`: models are built iteratively by calling the model-building
function, which populates the hyperparameter space (search space) tracked by the `hp` object.
When the search is over, you can retrieve the best model(s):
# Retrieve the two best-performing models found during the search.
models = tuner.get_best_models(num_models=2)
View a summary of the results:
# Print a summary of the completed trials and their scores.
tuner.results_summary()
The search space may contain conditional hyperparameters:
def build_model(hp):
    """Build a classifier whose depth is itself a hyperparameter.

    The number of Dense layers (``num_layers``) is sampled first, and each
    layer then gets its own independent ``units_i`` hyperparameter, yielding
    a conditional search space.

    Args:
        hp: keras-tuner ``HyperParameters`` object used to sample values.

    Returns:
        A compiled ``tf.keras`` model.
    """
    model = keras.Sequential()
    # hp.Int replaces the pre-release hp.Range API (renamed in keras-tuner 1.0).
    for i in range(hp.Int('num_layers', 2, 20)):
        model.add(layers.Dense(units=hp.Int('units_' + str(i),
                                            min_value=32,
                                            max_value=512,
                                            step=32),
                               activation='relu'))
    model.add(layers.Dense(10, activation='softmax'))
    model.compile(
        optimizer=keras.optimizers.Adam(
            hp.Choice('learning_rate', [1e-2, 1e-3, 1e-4])),
        loss='sparse_categorical_crossentropy',
        metrics=['accuracy'])
    return model
You can use a HyperModel subclass instead of a model-building function.
Implementing only the build(self, hp) method makes hypermodels easy to reuse and share.
from kerastuner import HyperModel
class MyHyperModel(HyperModel):
    """Reusable hypermodel: a single-hidden-layer dense classifier.

    Subclassing ``HyperModel`` (implementing ``build(self, hp)``) makes the
    model definition easy to share and reuse across tuners.
    """

    def __init__(self, num_classes):
        # Initialize HyperModel's own state before adding ours
        # (the original omitted the super() call).
        super().__init__()
        self.num_classes = num_classes  # size of the softmax output layer

    def build(self, hp):
        """Sample hyperparameters from ``hp`` and return a compiled model."""
        model = keras.Sequential()
        # hp.Int replaces the pre-release hp.Range API (renamed in keras-tuner 1.0).
        model.add(layers.Dense(units=hp.Int('units',
                                            min_value=32,
                                            max_value=512,
                                            step=32),
                               activation='relu'))
        model.add(layers.Dense(self.num_classes, activation='softmax'))
        model.compile(
            optimizer=keras.optimizers.Adam(
                hp.Choice('learning_rate',
                          values=[1e-2, 1e-3, 1e-4])),
            loss='sparse_categorical_crossentropy',
            metrics=['accuracy'])
        return model
# A HyperModel instance can be passed to a tuner in place of a build function.
hypermodel = MyHyperModel(num_classes=10)
tuner = RandomSearch(
hypermodel,
objective='val_accuracy',
max_trials=10,
directory='my_dir',
project_name='helloworld')
# Run the search; assumes x, y, val_x, val_y are defined elsewhere — TODO confirm.
tuner.search(x, y,
epochs=5,
validation_data=(val_x, val_y))
from kerastuner import HyperParameters
# These two names were used but never imported in the original snippet.
from kerastuner.applications import HyperXception
from kerastuner.tuners import Hyperband

# Ready-made Xception hypermodel for 128x128 RGB images, 10 classes.
hypermodel = HyperXception(input_shape=(128, 128, 3), num_classes=10)

hp = HyperParameters()
# This will override the `learning_rate` parameter with your
# own selection of choices
hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])

tuner = Hyperband(
    hypermodel,
    hyperparameters=hp,
    # `tune_new_entries=False` prevents unlisted parameters from being tuned
    tune_new_entries=False,
    objective='val_accuracy',
    # NOTE(review): released keras-tuner's Hyperband takes `max_epochs`, not
    # `max_trials` — confirm against the installed version before running.
    max_trials=40,
    directory='my_dir',
    project_name='helloworld')

tuner.search(x, y,
             epochs=20,
             validation_data=(val_x, val_y))
Default parameter values

When registering a hyperparameter inside a model-building function or the build method
of a hypermodel, you can specify a default value:
# Register an integer hyperparameter with an explicit default value.
# (hp.Int replaces the pre-release hp.Range API; without `default`,
# an integer hyperparameter defaults to its min_value.)
hp.Int('units',
       min_value=32,
       max_value=512,
       step=32,
       default=128)
Whether or not you specify one, every hyperparameter always has a default value
(for an integer range it is equal to min_value).
References
https://github.com/keras-team/keras-tuner
'Machine Learning > ๋ฅ๋ฌ๋' ์นดํ ๊ณ ๋ฆฌ์ ๋ค๋ฅธ ๊ธ
๋ฅ๋ฌ๋ ๋ชจ๋ธ์ ๊ต์ฐจ๊ฒ์ฆ (Cross Validation) (0) | 2019.08.21 |
---|---|
[Machine Learning] Train data normalization (0) | 2019.07.12 |
MDN (0) | 2019.07.11 |
Hyperparameter Tuning (1) | 2019.07.11 |
Relu ํจ์ (0) | 2019.06.28 |