    elif metric == "knn_d2_max":
        return np.max(distances[:, 1:], axis=1)
    else:
-        raise ValueError(f"Unsupported distance metric: {metric}")
+        raise ValueError(
+            f"Unsupported label metric: {metric}. Supported metrics are {', '.join(metrics)}"
+        )
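# Illustrative sketch, not part of the patch: one way a `distances` matrix like the one
# consumed by the "knn_d2_max" branch above could be produced. The NearestNeighbors
# query, the n_neighbors value, and the `points` array are assumptions for illustration.
import numpy as np
from sklearn.neighbors import NearestNeighbors

points = np.random.default_rng(0).normal(size=(100, 3))
distances, _ = NearestNeighbors(n_neighbors=5).fit(points).kneighbors(points)
# Column 0 holds each point's zero distance to itself, hence the [:, 1:] slice
# before the row-wise max, mirroring the branch above.
knn_d2_max = np.max(distances[:, 1:], axis=1)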
    objective_values_matrix = np.array([trial.values for trial in best_trials])
    normalized_matrix = np.zeros_like(objective_values_matrix, dtype=float)
    return False
+regressors = {"xgboost", "lightgbm"}
+
+
def get_callbacks(trial: optuna.trial.Trial, regressor: str) -> list[Callable]:
    if regressor == "xgboost":
        callbacks = [
    elif regressor == "lightgbm":
        callbacks = [optuna.integration.LightGBMPruningCallback(trial, "rmse")]
    else:
-        raise ValueError(f"Unsupported regressor model: {regressor}")
+        raise ValueError(
+            f"Unsupported regressor model: {regressor} (supported: {', '.join(regressors)})"
+        )
    return callbacks
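# Illustrative sketch, not part of the patch: how the pruning callbacks returned by
# get_callbacks() might be wired into an Optuna objective. The LGBMRegressor, the
# synthetic data, and the hyperparameter space are assumptions for illustration only.
import lightgbm as lgb
import numpy as np
import optuna

rng = np.random.default_rng(0)
X = rng.normal(size=(500, 8))
y = X @ rng.normal(size=8) + rng.normal(scale=0.1, size=500)
X_train, X_valid, y_train, y_valid = X[:400], X[400:], y[:400], y[400:]


def objective(trial: optuna.trial.Trial) -> float:
    callbacks = get_callbacks(trial, "lightgbm")
    model = lgb.LGBMRegressor(
        learning_rate=trial.suggest_float("learning_rate", 1e-3, 0.5, log=True)
    )
    model.fit(
        X_train,
        y_train,
        eval_set=[(X_valid, y_valid)],
        eval_metric="rmse",
        callbacks=callbacks,  # reports "rmse" so unpromising trials can be pruned
    )
    return model.best_score_["valid_0"]["rmse"]


study = optuna.create_study(direction="minimize", pruner=optuna.pruners.MedianPruner())
study.optimize(objective, n_trials=20)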
            callbacks=callbacks,
        )
    else:
-        raise ValueError(f"Unsupported regressor model: {regressor}")
+        raise ValueError(
+            f"Unsupported regressor model: {regressor} (supported: {', '.join(regressors)})"
+        )
    return model
    regressor: str,
    model_training_best_parameters: dict[str, Any],
) -> dict[str, Any]:
+    if regressor not in regressors:
+        raise ValueError(
+            f"Unsupported regressor model: {regressor} (supported: {', '.join(regressors)})"
+        )
    default_ranges = {
        "learning_rate": (1e-3, 0.5),
        "min_child_weight": (1e-8, 100.0),