Commit 37c5050e authored by Joaquin Torres's avatar Joaquin Torres

2.2. completed model definition

parent 17c2236c
......@@ -59,7 +59,7 @@ def get_tuned_models(group_id, method_id):
"RF": RandomForestClassifier(**{'criterion': 'entropy', 'max_features': 'sqrt', 'n_estimators': 118, 'class_weight': 'balanced'}),
"Bagging": BaggingClassifier(**{'max_features': 1.0, 'max_samples': 1.0, 'n_estimators': 15, 'warm_start': False, 'estimator': DecisionTreeClassifier(class_weight='balanced')}),
"AB": AdaBoostClassifier(**{'learning_rate': 0.8159074545140872, 'n_estimators': 121, 'algorithm': 'SAMME', 'estimator': DecisionTreeClassifier(class_weight='balanced')}),
"LR": LogisticRegression(**{'solver': 'lbfgs', 'penalty': 'none', 'max_iter': 1000, 'class_weight': 'balanced'}),
"LR": LogisticRegression(**{'solver': 'lbfgs', 'penalty': None, 'max_iter': 1000, 'class_weight': 'balanced'}),
"SVM": SVC(**{'C': 1.5550524351360953, 'kernel': 'linear', 'max_iter': 1000, 'class_weight': 'balanced'}),
}
# 1.3) Trained with oversampled training dataset
......@@ -103,12 +103,12 @@ def get_tuned_models(group_id, method_id):
# 2.2) Trained with original dataset and cost-sensitive learning
elif method_id == 1:
tuned_models = {
# "DT": DecisionTreeClassifier(**{'splitter': 'best', 'max_features': 'log2', 'criterion': 'entropy', 'class_weight': 'balanced'}),
# "RF": RandomForestClassifier(**{'criterion': 'entropy', 'max_features': 'sqrt', 'n_estimators': 118, 'class_weight': 'balanced'}),
# "Bagging": BaggingClassifier(**{'max_features': 1.0, 'max_samples': 1.0, 'n_estimators': 15, 'warm_start': False, 'estimator': DecisionTreeClassifier(class_weight='balanced')}),
# "AB": AdaBoostClassifier(**{'learning_rate': 0.8159074545140872, 'n_estimators': 121, 'algorithm': 'SAMME', 'estimator': DecisionTreeClassifier(class_weight='balanced')}),
# "LR": LogisticRegression(**{'solver': 'lbfgs', 'penalty': 'none', 'max_iter': 1000, 'class_weight': 'balanced'}),
# "SVM": SVC(**{'C': 1.5550524351360953, 'kernel': 'linear', 'max_iter': 1000, 'class_weight': 'balanced'}),
"DT": DecisionTreeClassifier(**{'splitter': 'best', 'max_features': 'sqrt', 'criterion': 'log_loss', 'class_weight': 'balanced'}),
"RF": RandomForestClassifier(**{'criterion': 'entropy', 'max_features': 'sqrt', 'n_estimators': 164, 'class_weight': 'balanced'}),
"Bagging": BaggingClassifier(**{'max_features': 1.0, 'max_samples': 0.8, 'n_estimators': 11, 'warm_start': True, 'estimator': DecisionTreeClassifier(class_weight='balanced')}),
"AB": AdaBoostClassifier(**{'learning_rate': 1.7102248217141944, 'n_estimators': 108, 'algorithm': 'SAMME', 'estimator': DecisionTreeClassifier(class_weight='balanced')}),
"LR": LogisticRegression(**{'solver': 'lbfgs', 'penalty': None, 'max_iter': 1000, 'class_weight': 'balanced'}),
"SVM": SVC(**{'C': 1.1313840454519628, 'kernel': 'sigmoid', 'max_iter': 1000, 'class_weight': 'balanced'})
}
# 2.3) Trained with oversampled training dataset
elif method_id == 2:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment