我正在使用 Hyperopt 来优化 XGBRegressor 模型的超参数,但收到错误:
reg_alpha = int(space['reg_alpha'],min_child_weight=space['min_child_weight'],
TypeError: int() takes at most 2 arguments (3 given)
from scipy.constants import hp
from sklearn.metrics import mean_squared_error
from xgboost import XGBRegressor
# Hyperopt search space: each hp.* entry is sampled per trial, while plain
# values (n_estimators) stay fixed. Note that hp.quniform returns floats,
# so integer-valued parameters must be cast with int() before use.
space = {
    "max_depth": hp.quniform("max_depth", 3, 18, 1),
    "gamma": hp.uniform("gamma", 1, 9),
    "reg_alpha": hp.quniform("reg_alpha", 40, 180, 1),
    "reg_lambda": hp.uniform("reg_lambda", 0, 1),
    "colsample_bytree": hp.uniform("colsample_bytree", 0.5, 1),
    "min_child_weight": hp.quniform("min_child_weight", 0, 10, 1),
    "n_estimators": 180,  # constant, not tuned
}
def hyperparameter_tuning(space):
    """Hyperopt objective: fit an XGBRegressor with one sampled point and
    return the validation MSE as the loss.

    Parameters
    ----------
    space : dict
        A sampled point from the Hyperopt search space; keys match
        XGBRegressor constructor arguments. hp.quniform yields floats,
        so integer parameters are cast with int().

    Returns
    -------
    dict
        {"loss": mse, "status": STATUS_OK, "model": model} — the shape
        hyperopt.fmin expects from an objective.

    Relies on module-level X_train, y_train, X_valid, y_valid.
    """
    # BUG FIX: the original closed int(...) only after colsample_bytree,
    # so min_child_weight= and colsample_bytree= were passed to int()
    # instead of XGBRegressor, raising
    # "TypeError: int() takes at most 2 arguments (3 given)".
    # int() must wrap space["reg_alpha"] alone.
    model = XGBRegressor(
        n_estimators=space["n_estimators"],
        max_depth=int(space["max_depth"]),   # quniform samples are floats
        gamma=space["gamma"],
        reg_alpha=int(space["reg_alpha"]),
        min_child_weight=space["min_child_weight"],
        colsample_bytree=space["colsample_bytree"],
        # NOTE(review): space also samples "reg_lambda" but it is never
        # passed to the model — likely an oversight; confirm intent.
    )
    evaluation = [(X_train, y_train), (X_valid, y_valid)]
    model.fit(
        X_train,
        y_train,
        eval_set=evaluation,
        eval_metric="rmse",
        early_stopping_rounds=10,
        verbose=False,
    )
    pred = model.predict(X_valid)
    mse = mean_squared_error(y_valid, pred)
    print("SCORE:", mse)
    return {"loss": mse, "status": STATUS_OK, "model": model}
你写的
reg_alpha=int(
space["reg_alpha"],
min_child_weight=space["min_child_weight"],
colsample_bytree=space["colsample_bytree"],
),
看来你的意图是(int() 应只包住 space["reg_alpha"],右括号要提前闭合):
reg_alpha=int(
space["reg_alpha"]),
min_child_weight=space["min_child_weight"],
colsample_bytree=space["colsample_bytree"],