import kerastuner as kt  # in newer releases the package is named keras_tuner
from tensorflow import keras
from tensorflow.keras import layers, initializers, regularizers

## Define the model-building function with the Sequential API
def build_hyper_model(hp):
    model = keras.Sequential()
    model.add(layers.Dense(input_dim=13, units=16, activation='relu',
                           kernel_initializer=initializers.he_normal(),
                           kernel_regularizer=regularizers.l2(0.001)))
    # Tune the number of hidden layers (pick the best value from 1 to 3)
    for i in range(hp.Int('num_layers', min_value=1, max_value=3)):
        # Tune the number of units per hidden layer (8 to 256, in steps of 32)
        hp_units = hp.Int('units_' + str(i), min_value=8, max_value=256, step=32)
        hp_activations = hp.Choice('activation_' + str(i), values=['relu', 'elu'])
        model.add(layers.Dense(units=hp_units, activation=hp_activations,
                               kernel_initializer=initializers.he_normal()))
    model.add(layers.Dense(2, activation='softmax'))
    # Tune the optimizer's learning rate (choose among 0.01, 0.001, and 0.0001)
    hp_learning_rate = hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])
    model.compile(optimizer=keras.optimizers.Adam(learning_rate=hp_learning_rate),
                  loss=keras.losses.CategoricalCrossentropy(),  # labels must be one-hot encoded
                  metrics=['accuracy'])  # equivalent to tf.keras.metrics.CategoricalAccuracy()
    return model
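## Quick sanity check (a minimal sketch): calling the build function with a fresh
## HyperParameters object builds the model with each hyper-parameter's default
## value, which is a cheap way to confirm the model compiles before a full search.
sanity_model = build_hyper_model(kt.HyperParameters())
sanity_model.summary()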
## Choose a tuner (one of BayesianOptimization, RandomSearch, or Hyperband)
## For this dataset, BayesianOptimization gave the best results
tuner = kt.BayesianOptimization(build_hyper_model,
                                objective='val_accuracy',      # metric to minimize or maximize during tuning
                                max_trials=10,                 # total number of trials (hyper-parameter combinations) to run
                                directory='test_prac_dir',     # path to the working directory
                                project_name='heart_hyper_1')  # directory name for files saved by this tuner
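## Note (an assumption about the installed version): the tuner checkpoints its
## results under directory/project_name and reloads them on re-runs; recent
## releases accept overwrite=True to discard previous results and start fresh:
# tuner = kt.BayesianOptimization(build_hyper_model, objective='val_accuracy',
#                                 max_trials=10, directory='test_prac_dir',
#                                 project_name='heart_hyper_1', overwrite=True)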
##==================================================================================
# from kerastuner.tuners import RandomSearch
# tuner = RandomSearch(
#     build_hyper_model,                    # hypermodel-building function
#     objective='val_accuracy',             # metric to minimize or maximize during tuning
#     max_trials=10,                        # total number of trials to run
#     executions_per_trial=3,               # models trained per trial (results averaged to reduce variance)
#     directory='/content/sample_data',     # folder for saved tuning results
#     project_name='RandomSearch_tune_res') # project (sub-directory) name
##==================================================================================
# from kerastuner.tuners import Hyperband
# tuner = Hyperband(
#     build_hyper_model,                    # hypermodel-building function
#     objective='val_accuracy',             # metric to minimize or maximize during tuning
#     max_epochs=20,                        # maximum epochs used to train any single model
#     factor=3,                             # reduction factor for epochs and models in each bracket
#     directory='/content/sample_data',     # folder for saved tuning results
#     project_name='Hyperband_tune_res')    # project (sub-directory) name
tuner.search_space_summary()
tuner.search(train_data_scaled,
             train_label,
             batch_size=100,
             epochs=20,
             validation_split=0.2)
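## tuner.search forwards its keyword arguments to model.fit, so an EarlyStopping
## callback can cut off unpromising trials early; a minimal sketch of the
## alternative call (the patience value is an arbitrary choice):
# stop_early = keras.callbacks.EarlyStopping(monitor='val_loss', patience=5)
# tuner.search(train_data_scaled, train_label,
#              batch_size=100, epochs=20,
#              validation_split=0.2,
#              callbacks=[stop_early])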
tuner.results_summary(num_trials=3)
## Inspect the hyper-parameters of the top-3 trials (ranked by validation accuracy)
top3_hps = tuner.get_best_hyperparameters(num_trials=3)
for idx, hps in enumerate(top3_hps):
    print('Trial performance rank :', idx)
    print(hps.values)
    print()
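## If trained model instances are needed instead of hyper-parameter sets,
## the tuner can return them directly; a sketch:
# top3_trained_models = tuner.get_best_models(num_models=3)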
# Check the best trial's hyper-parameters
best_hps = top3_hps[0]
print("""
The hyperparameter search is complete.
* Optimal # of layers : {}
* Optimal value of the learning-rate : {}""".format(best_hps.get('num_layers'), best_hps.get('learning_rate')))
for layer_num in range(best_hps.get('num_layers')):
print('Layer {} - # of Perceptrons :'.format(layer_num), best_hps.get('units_' + str(layer_num)))
print('Layer {} - Applied activation function :'.format(layer_num), best_hps.get('activation_' + str(layer_num)))
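## Final step (a minimal sketch): rebuild a model from the best hyper-parameters
## and retrain it; test_data_scaled and test_label are assumed hold-out arrays
## prepared the same way as the training data above.
best_model = tuner.hypermodel.build(best_hps)
best_model.fit(train_data_scaled, train_label,
               batch_size=100, epochs=20,
               validation_split=0.2)
test_loss, test_acc = best_model.evaluate(test_data_scaled, test_label)  # assumed hold-out arrays
print('Test accuracy :', test_acc)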