train_models.py
"""
    Selecting best models through cross validation and hyperparameter tunning 
    for each method: 
        1. Original training dataset
        2. Original training dataset - Cost sensitive
        3. Oversampling
        4. Undersampling
"""

# Libraries
# --------------------------------------------------------------------------------------------------------
import pandas as pd
import numpy as np
from xgboost import XGBClassifier
from sklearn.metrics import confusion_matrix
from sklearn.metrics import f1_score, make_scorer, precision_score, recall_score
from sklearn.model_selection import StratifiedKFold, cross_validate
from sklearn.ensemble import RandomForestClassifier, BaggingClassifier, AdaBoostClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
# --------------------------------------------------------------------------------------------------------


if __name__ == "__main__":

    # Reading training and test data
    # --------------------------------------------------------------------------------------------------------
    # Load test data
    X_test_pre = np.load('gen_train_data/data/output/pre/X_test_pre.npy', allow_pickle=True)
    y_test_pre = np.load('gen_train_data/data/output/pre/y_test_pre.npy', allow_pickle=True)
    X_test_post = np.load('gen_train_data/data/output/post/X_test_post.npy', allow_pickle=True)
    y_test_post = np.load('gen_train_data/data/output/post/y_test_post.npy', allow_pickle=True)

    # Load ORIGINAL training data
    X_train_pre = np.load('gen_train_data/data/output/pre/X_train_pre.npy', allow_pickle=True)
    y_train_pre = np.load('gen_train_data/data/output/pre/y_train_pre.npy', allow_pickle=True)
    X_train_post = np.load('gen_train_data/data/output/post/X_train_post.npy', allow_pickle=True)
    y_train_post = np.load('gen_train_data/data/output/post/y_train_post.npy', allow_pickle=True)

    # Load oversampled training data
    X_train_over_pre = np.load('gen_train_data/data/output/pre/X_train_over_pre.npy', allow_pickle=True)
    y_train_over_pre = np.load('gen_train_data/data/output/pre/y_train_over_pre.npy', allow_pickle=True)
    X_train_over_post = np.load('gen_train_data/data/output/post/X_train_over_post.npy', allow_pickle=True)
    y_train_over_post = np.load('gen_train_data/data/output/post/y_train_over_post.npy', allow_pickle=True)

    # Load undersampled training data
    X_train_under_pre = np.load('gen_train_data/data/output/pre/X_train_under_pre.npy', allow_pickle=True)
    y_train_under_pre = np.load('gen_train_data/data/output/pre/y_train_under_pre.npy', allow_pickle=True)
    X_train_under_post = np.load('gen_train_data/data/output/post/X_train_under_post.npy', allow_pickle=True)
    y_train_under_post = np.load('gen_train_data/data/output/post/y_train_under_post.npy', allow_pickle=True)
    # --------------------------------------------------------------------------------------------------------
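
    # Illustrative sketch (not in the original file): the splits loaded above could be grouped
    # by sampling strategy so a single loop can cover every method listed in the module
    # docstring. The dictionary name and layout are assumptions for illustration only.
    train_sets = {
        "pre": {
            "ORIG": (X_train_pre, y_train_pre),
            "OVER": (X_train_over_pre, y_train_over_pre),
            "UNDER": (X_train_under_pre, y_train_under_pre),
        },
        "post": {
            "ORIG": (X_train_post, y_train_post),
            "OVER": (X_train_over_post, y_train_over_post),
            "UNDER": (X_train_under_post, y_train_under_post),
        },
    }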

    # Defining the models to train
    # --------------------------------------------------------------------------------------------------------
    # 1. No class weight
    models_1 = {"DT" : DecisionTreeClassifier(), 
            "RF" : RandomForestClassifier(), 
            "Bagging" : BaggingClassifier(),
            "AB" : AdaBoostClassifier(), 
            "XGB": XGBClassifier(),
            "LR" : LogisticRegression(), 
            "ElNet" : LogisticRegression(penalty='elasticnet'), 
            "SVM" : SVC(), 
            "MLP" : MLPClassifier(),
            }
    
    # 2. Class weight (cost-sensitive)
    models_2 = {"DT": DecisionTreeClassifier(class_weight='balanced'),
                "RF": RandomForestClassifier(class_weight='balanced'),
                "Bagging": BaggingClassifier(),  # no class_weight parameter
                "AB": AdaBoostClassifier(),      # no class_weight parameter
                "XGB": XGBClassifier(),          # no class_weight parameter (scale_pos_weight could be used instead)
                "LR": LogisticRegression(class_weight='balanced'),
                "ElNet": LogisticRegression(penalty='elasticnet', solver='saga', l1_ratio=0.5, class_weight='balanced'),
                "SVM": SVC(class_weight='balanced'),
                "MLP": MLPClassifier(),          # no class_weight parameter
                }
    # --------------------------------------------------------------------------------------------------------
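
    # Evaluation sketch (assumption, not from the original file): how the model dictionaries
    # above might be scored with the imported StratifiedKFold / cross_validate utilities.
    # The 10-fold setup, the scorer choices, and the use of the pre-cohort original split
    # are illustrative placeholders.
    scorers = {
        "precision": make_scorer(precision_score, zero_division=0),
        "recall": make_scorer(recall_score, zero_division=0),
        "f1": make_scorer(f1_score, zero_division=0),
    }
    cv = StratifiedKFold(n_splits=10, shuffle=True, random_state=42)
    cv_results = {}
    for model_name, model in models_1.items():
        scores = cross_validate(model, X_train_pre, y_train_pre, scoring=scorers, cv=cv)
        cv_results[model_name] = {metric: float(np.mean(scores[f"test_{metric}"])) for metric in scorers}
    print(pd.DataFrame(cv_results).T)  # rows: models, columns: mean CV precision/recall/f1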