python - 警告:WARNING:tensorflow:Can save best model only with f1 available, skipping(Tensorflow:只能在 f1 可用的情况下保存最佳模型,跳过)
问题描述
我正在使用 Keras 调谐器进行超参数调整。我制作了一个自定义目标函数-
from keras import backend as K
def recall_m(y_true, y_pred):
    """Recall metric: TP / (TP + FN), computed with Keras backend ops.

    Predictions are rounded to {0, 1} before counting; K.epsilon()
    guards against division by zero when there are no positives.
    """
    tp = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    actual_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    return tp / (actual_positives + K.epsilon())
def precision_m(y_true, y_pred):
    """Precision metric: TP / (TP + FP), computed with Keras backend ops.

    Predictions are rounded to {0, 1} before counting; K.epsilon()
    guards against division by zero when nothing is predicted positive.
    """
    tp = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
    return tp / (predicted_positives + K.epsilon())
def f1_m(y_true, y_pred):
    """F1 score: harmonic mean of precision and recall.

    Keras logs this metric under the function name ("f1_m", and
    "val_f1_m" on the validation split).
    """
    p = precision_m(y_true, y_pred)
    r = recall_m(y_true, y_pred)
    return 2 * (p * r) / (p + r + K.epsilon())
编译功能是 -
# Compile with a tunable learning rate (log-sampled between 1e-6 and 1e-2)
# and the custom f1_m function registered as the training metric.
model.compile(
    optimizer=keras.optimizers.Adam(
        hp.Float(
            "learning_rate",
            min_value=1e-6,
            max_value=1e-2,
            sampling="LOG",
            default=1e-3,
        )
    ),
    loss="binary_crossentropy",
    metrics=[f1_m],
)
这是我打电话给调谐器的地方
import time
from pathlib import Path

from kerastuner import Objective
from kerastuner.tuners import (
    BayesianOptimization,
    Hyperband,
    RandomSearch,
)
from loguru import logger
# Experiment configuration for the hyperparameter search.
SEED = 1
NUM_CLASSES = 1  # single output unit (binary classification)
INPUT_SHAPE = (25,1)  # 25 features, 1 channel
N_EPOCH_SEARCH = 40  # epochs per trial during tuner.search
HYPERBAND_MAX_EPOCHS = 40
MAX_TRIALS = 20  # RandomSearch trial budget
EXECUTION_PER_TRIAL = 2  # repeats per trial to average out noise
BAYESIAN_NUM_INITIAL_POINTS = 1
def run_hyperparameter_tuning():
    """Build the hypermodel, run each configured tuner, and log the results.

    NOTE(review): relies on CNNHyperModel and the train/test arrays
    (X_train, X_test, y_train, y_test) being defined elsewhere in scope.
    """
    hypermodel = CNNHyperModel(input_shape=INPUT_SHAPE, num_classes=NUM_CLASSES)
    output_dir = Path("drive/MyDrive/New_Tuning_upload_latest_3/")
    tuners = define_tuners(
        hypermodel, directory=output_dir, project_name="simple_cnn_tuning_part3"
    )
    results = []
    for tuner in tuners:
        elapsed_time, loss, accuracy = tuner_evaluation(
            tuner, X_test, X_train, y_test, y_train
        )
        logger.info(
            f"Elapsed time = {elapsed_time:10.4f} s, accuracy = {accuracy}, loss = {loss}"
        )
        results.append([elapsed_time, loss, accuracy])
    logger.info(results)
def tuner_evaluation(tuner, X_test, X_train, y_test, y_train):
    """Run one tuner's search and evaluate its best model on the test set.

    Returns a tuple (elapsed_seconds, test_loss, test_metric).
    """
    set_gpu_config()
    # Overview of the search space before starting.
    tuner.search_space_summary()
    logger.info("Start hyperparameter tuning")
    start = time.time()
    tuner.search(X_train, y_train, epochs=N_EPOCH_SEARCH, validation_split=0.1)
    elapsed_time = time.time() - start
    # Summary of the completed search.
    tuner.results_summary()
    # Retrieve and score the single best model found.
    best_model = tuner.get_best_models(num_models=1)[0]
    loss, accuracy = best_model.evaluate(X_test, y_test)
    return elapsed_time, loss, accuracy
def define_tuners(hypermodel, directory, project_name):
    """Create the RandomSearch and Hyperband tuners for the experiment.

    The tuning objective name must match a key in the Keras training logs.
    A metric registered via `metrics=[f1_m]` is logged under the function
    name ("f1_m"), and as "val_f1_m" on the validation split. Using "f1"
    matches nothing, so the tuner cannot rank checkpoints and emits
    "WARNING:tensorflow:Can save best model only with f1 available,
    skipping" — hence Objective("val_f1_m", ...) below.
    """
    # Maximize validation F1 as logged by the custom f1_m metric.
    random_tuner = RandomSearch(
        hypermodel,
        objective=Objective("val_f1_m", direction="max"),
        seed=SEED,
        max_trials=MAX_TRIALS,
        executions_per_trial=EXECUTION_PER_TRIAL,
        directory=f"{directory}_random_search",
        project_name=project_name,
    )
    hyperband_tuner = Hyperband(
        hypermodel,
        max_epochs=HYPERBAND_MAX_EPOCHS,
        objective=Objective("val_f1_m", direction="max"),
        seed=SEED,
        executions_per_trial=EXECUTION_PER_TRIAL,
        directory=f"{directory}_hyperband",
        project_name=project_name,
    )
    return [random_tuner, hyperband_tuner]
# Script entry point: run the full tuning experiment.
if __name__ == "__main__":
    run_hyperparameter_tuning()
当我训练时,每个 epoch 结束后都会出现该警告
解决方案
调谐器的 objective 名称必须与 Keras 训练日志中的指标键一致:通过 metrics=[f1_m] 注册的自定义指标按函数名记录为 "f1_m",验证集上记录为 "val_f1_m"。因此应使用 Objective("val_f1_m", direction="max") 而不是 "f1",这样调谐器才能找到该指标并保存最佳模型。
推荐阅读
- javascript - 我如何传递匹配的 id?
- c# - 根据条件使用另一个列表中的值更新一个列表
- java - Java中的随机播放问题(复发?)
- python - 在 vscode 中丢失了 python 语法高亮
- amazon-web-services - 有没有办法使用 Node.js 确定 AWS DynamoDB 项目中的属性类型?
- algorithm - 从两个集合中返回第 j 个最小元素的算法
- sql - 是否可以在 Oracle SQL 中自动创建触发器?
- c - gdb:级联信号。关于外层的信息 - HOWTO
- python - python pandas数据框中的热图标签和位置
- wordpress - 该网站两次出现意外行为。是不是受到了攻击?还是有安全隐患?