Commit 3875a58e authored by Joaquin Torres's avatar Joaquin Torres

Relaxed the XGBoost hyperparameter search space to reduce training time in later scripts

parent 9ad950f3
......@@ -106,9 +106,10 @@ if __name__ == "__main__":
'warm_start': [True, False]},
"AB": {'n_estimators': randint(50, 150),
'learning_rate': uniform(0.8, 1.2)},
"XGB": {'n_estimators': randint(100, 1000),
'max_depth': randint(3, 10),
'learning_rate': uniform(0.01, 0.3)},
"XGB": {'n_estimators': randint(100, 500),
'max_depth': randint(3, 6),
'learning_rate': uniform(0.05, 0.15),
'tree_method': ['hist']},
"LR": {'penalty': ['l1', 'l2', 'elasticnet', None],
'solver': ['lbfgs', 'sag', 'saga']},
"SVM": {'C': uniform(0.8, 1.2),
......@@ -146,6 +147,8 @@ if __name__ == "__main__":
hyperparam_df = pd.DataFrame(index=list(models.keys()), columns=['Best Parameters'])
for model_name, model in models.items():
print(f"{group}-{method_names[j]}-{model_name}")
if model_name != 'XGB':
continue
# Find optimal hyperparams for curr model
params = hyperparameters[model_name]
search = RandomizedSearchCV(model, param_distributions=params, cv=cv, n_jobs=10, scoring='precision')
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment