Commit 60dc40b6 authored by Joaquin Torres

Completed the definitions of 2.1 and 2.4 as well, and identified a typo in the hyperparam script leading to incorrect results in 2.2 and 2.3
parent af398543
@@ -62,7 +62,6 @@ def get_tuned_models(group_id, method_id):
                "LR": LogisticRegression(**{'solver': 'lbfgs', 'penalty': 'none', 'max_iter': 1000, 'class_weight': 'balanced'}),
                "SVM": SVC(**{'C': 1.5550524351360953, 'kernel': 'linear', 'max_iter': 1000, 'class_weight': 'balanced'}),
            }
        # 1.3) Trained with oversampled training dataset
        elif method_id == 2:
            tuned_models = {
@@ -103,13 +102,38 @@ def get_tuned_models(group_id, method_id):
            }
        # 2.2) Trained with original dataset and cost-sensitive learning
        elif method_id == 1:
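            # Entries commented out: a typo in the hyperparam script produced incorrect results for 2.2, so these values are pending re-tuning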
            tuned_models = {
                # "DT": DecisionTreeClassifier(**{'splitter': 'best', 'max_features': 'log2', 'criterion': 'entropy', 'class_weight': 'balanced'}),
                # "RF": RandomForestClassifier(**{'criterion': 'entropy', 'max_features': 'sqrt', 'n_estimators': 118, 'class_weight': 'balanced'}),
                # "Bagging": BaggingClassifier(**{'max_features': 1.0, 'max_samples': 1.0, 'n_estimators': 15, 'warm_start': False, 'estimator': DecisionTreeClassifier(class_weight='balanced')}),
                # "AB": AdaBoostClassifier(**{'learning_rate': 0.8159074545140872, 'n_estimators': 121, 'algorithm': 'SAMME', 'estimator': DecisionTreeClassifier(class_weight='balanced')}),
                # "LR": LogisticRegression(**{'solver': 'lbfgs', 'penalty': 'none', 'max_iter': 1000, 'class_weight': 'balanced'}),
                # "SVM": SVC(**{'C': 1.5550524351360953, 'kernel': 'linear', 'max_iter': 1000, 'class_weight': 'balanced'}),
            }
        # 2.3) Trained with oversampled training dataset
        elif method_id == 2:
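            # Entries commented out: the same hyperparam-script typo invalidated the 2.3 results, pending re-tuning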
            tuned_models = {
                # "DT" : DecisionTreeClassifier(**{'splitter': 'random', 'max_features': 'sqrt', 'criterion': 'log_loss'}),
                # "RF" : RandomForestClassifier(**{'criterion': 'gini', 'max_features': 'sqrt', 'n_estimators': 135}),
                # "Bagging" : BaggingClassifier(**{'max_features': 1.0, 'max_samples': 1.0, 'n_estimators': 26, 'warm_start': True}),
                # "AB" : AdaBoostClassifier(**{'learning_rate': 1.6590924545876917, 'n_estimators': 141, 'algorithm': 'SAMME'}),
                # "XGB": XGBClassifier(**{'learning_rate': 0.26946295284728783, 'max_depth': 7, 'n_estimators': 893}),
                # "LR" : LogisticRegression(**{'solver': 'lbfgs', 'penalty': 'l2', 'max_iter': 1000}),
                # "SVM" : SVC(**{'C': 1.676419306008229, 'kernel': 'poly', 'max_iter': 1000}),
                # "MLP" : MLPClassifier(**{'activation': 'relu', 'hidden_layer_sizes': 116, 'learning_rate': 'invscaling', 'max_iter': 500})
            }
        # 2.4) Trained with undersampled training dataset
        elif method_id == 3:
            tuned_models = {
                "DT" : DecisionTreeClassifier(**{'splitter': 'best', 'max_features': 'sqrt', 'criterion': 'entropy'}),
                "RF" : RandomForestClassifier(**{'criterion': 'gini', 'max_features': 'sqrt', 'n_estimators': 224}),
                "Bagging" : BaggingClassifier(**{'max_features': 1.0, 'max_samples': 0.8, 'n_estimators': 13, 'warm_start': True}),
                "AB" : AdaBoostClassifier(**{'learning_rate': 1.836659462701278, 'n_estimators': 138, 'algorithm': 'SAMME'}),
                "XGB": XGBClassifier(**{'learning_rate': 0.2517946893282251, 'max_depth': 4, 'n_estimators': 646}),
                "LR" : LogisticRegression(**{'solver': 'lbfgs', 'penalty': 'l2', 'max_iter': 1000}),
                "SVM" : SVC(**{'C': 1.8414678085000697, 'kernel': 'linear', 'max_iter': 1000}),
                "MLP" : MLPClassifier(**{'activation': 'relu', 'hidden_layer_sizes': 76, 'learning_rate': 'constant', 'max_iter': 500})
            }
    return tuned_models
# --------------------------------------------------------------------------------------------------------
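Minimal usage sketch (illustrative, not part of this commit): fetch the tuned models for one group/method pair and fit each of them. The group_id value and the placeholder arrays below are assumptions; the real script prepares its own training data.

import numpy as np

# Placeholder training data, for illustration only (the actual pipeline loads its own datasets)
X_train = np.random.rand(100, 5)
y_train = np.random.randint(0, 2, size=100)

# group_id=2 is an assumed value; method_id=3 maps to case 2.4 (undersampled training) above
models = get_tuned_models(group_id=2, method_id=3)
for name, model in models.items():
    model.fit(X_train, y_train)
    print(f"Fitted {name}")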