From 3875a58e7a1c767875bb8645245e134b66b9ee21 Mon Sep 17 00:00:00 2001 From: Joaquin Torres Bravo Date: Mon, 3 Jun 2024 12:32:04 +0200 Subject: [PATCH] Relaxed XGBoost to reduce training time in later scripts --- model_selection/hyperparam_tuning.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/model_selection/hyperparam_tuning.py b/model_selection/hyperparam_tuning.py index 7a76d5c..3fb508d 100644 --- a/model_selection/hyperparam_tuning.py +++ b/model_selection/hyperparam_tuning.py @@ -106,9 +106,10 @@ if __name__ == "__main__": 'warm_start': [True, False]}, "AB": {'n_estimators': randint(50, 150), 'learning_rate': uniform(0.8, 1.2)}, - "XGB": {'n_estimators': randint(100, 1000), - 'max_depth': randint(3, 10), - 'learning_rate': uniform(0.01, 0.3)}, + "XGB": {'n_estimators': randint(100, 500), + 'max_depth': randint(3, 6), + 'learning_rate': uniform(0.05, 0.15), + 'tree_method': ['hist']}, "LR": {'penalty': ['l1', 'l2', 'elasticnet', None], 'solver': ['lbfgs', 'sag', 'saga']}, "SVM": {'C': uniform(0.8, 1.2), @@ -146,6 +147,8 @@ if __name__ == "__main__": hyperparam_df = pd.DataFrame(index=list(models.keys()), columns=['Best Parameters']) for model_name, model in models.items(): print(f"{group}-{method_names[j]}-{model_name}") + if model_name != 'XGB': + continue # Find optimal hyperparams for curr model params = hyperparameters[model_name] search = RandomizedSearchCV(model, param_distributions=params, cv=cv, n_jobs=10, scoring='precision') -- 2.24.1