Commit 6975fa04 authored by Joaquin Torres

Ran for all models

parent 1e976bc5
@@ -250,31 +250,30 @@ if __name__ == "__main__":
# Scores df
scores_df = pd.DataFrame(index=models.keys(), columns=scorings.keys())
# Create a figure for all models in this group-method
-fig, axes = plt.subplots(len(models), 2, figsize=(12, 8 * len(models)))
+fig, axes = plt.subplots(len(models), 2, figsize=(8, 8 * len(models)))
if len(models) == 1: # Adjustment if there's only one model (axes indexing issue)
axes = [axes]
# Evaluate each model
for model_idx, (model_name, model) in enumerate(models.items()):
-    # ----------- TEMPORAL -------------
-    if model_name == "DT" or model_name == "RF":
-        # Train the model (it was just initialized above)
-        model.fit(X_train, y_train)
-        if hasattr(model, "decision_function"):
-            y_score = model.decision_function(X_test)
-        else:
-            y_score = model.predict_proba(X_test)[:, 1] # Use probability of positive class
-        # Calculate ROC curve and ROC area for each class
-        fpr, tpr, _ = roc_curve(y_test, y_score, pos_label=model.classes_[1])
-        roc_display = RocCurveDisplay(fpr=fpr, tpr=tpr).plot(ax=axes[model_idx][0])
-        # Calculate precision-recall curve
-        precision, recall, _ = precision_recall_curve(y_test, y_score, pos_label=model.classes_[1])
-        pr_display = PrecisionRecallDisplay(precision=precision, recall=recall).plot(ax=axes[model_idx][1])
-        axes[model_idx][0].set_title(f'ROC Curve for {model_name}')
-        axes[model_idx][1].set_title(f'PR Curve for {model_name}')
-        # Evaluate at each of the scores of interest
-        for score_name, scorer in scorings.items():
-            score_value = scorer(model, X_test, y_test)
-            scores_df.at[model_name, score_name] = score_value
+    # Train the model (it was just initialized above)
+    model.fit(X_train, y_train)
+    if hasattr(model, "decision_function"):
+        y_score = model.decision_function(X_test)
+    else:
+        y_score = model.predict_proba(X_test)[:, 1] # Use probability of the positive class
+    # Calculate the ROC curve for the positive class
+    fpr, tpr, _ = roc_curve(y_test, y_score, pos_label=model.classes_[1])
+    roc_display = RocCurveDisplay(fpr=fpr, tpr=tpr).plot(ax=axes[model_idx][0])
+    # Calculate the precision-recall curve
+    precision, recall, _ = precision_recall_curve(y_test, y_score, pos_label=model.classes_[1])
+    pr_display = PrecisionRecallDisplay(precision=precision, recall=recall).plot(ax=axes[model_idx][1])
+    axes[model_idx][0].set_title(f'ROC Curve for {model_name}')
+    axes[model_idx][1].set_title(f'PR Curve for {model_name}')
+    # Evaluate each of the scores of interest
+    for score_name, scorer in scorings.items():
+        score_value = scorer(model, X_test, y_test)
+        scores_df.at[model_name, score_name] = score_value
# Adjust layout and save/show figure
plt.tight_layout()
plt.savefig(f'./test_results/roc_pr_curves/{group}_{method_names[j]}.svg', format='svg', dpi=500)
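Note: the scorings mapping used in the loop is defined earlier in the script, outside this hunk. A plausible reconstruction (an assumption, not the repository's actual definition) builds it from scikit-learn's named scorers, whose callables take (estimator, X, y) exactly as scorer(model, X_test, y_test) is invoked in the diff:

from sklearn.metrics import get_scorer

# Hypothetical reconstruction of the scorings dict defined outside this hunk.
# Each value is a callable with signature scorer(estimator, X, y), matching
# the scorer(model, X_test, y_test) calls in the loop above.
scorings = {name: get_scorer(name) for name in ("precision", "recall", "f1", "roc_auc")}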
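For context, here is a minimal self-contained sketch of what the updated loop now does for every model (the DT/RF guard is gone): fit, score, and draw one ROC and one PR curve per model. The dataset, model choices, and output path below are illustrative assumptions, not the project's actual pipeline:

import matplotlib.pyplot as plt
import pandas as pd
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import (PrecisionRecallDisplay, RocCurveDisplay, get_scorer,
                             precision_recall_curve, roc_curve)
from sklearn.model_selection import train_test_split

# Illustrative stand-ins for the script's real data and model dictionaries
X, y = make_classification(n_samples=500, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
models = {"RF": RandomForestClassifier(random_state=0),
          "LR": LogisticRegression(max_iter=1000)}
scorings = {name: get_scorer(name) for name in ("f1", "roc_auc")}

scores_df = pd.DataFrame(index=models.keys(), columns=scorings.keys())
fig, axes = plt.subplots(len(models), 2, figsize=(8, 8 * len(models)))
if len(models) == 1:
    axes = [axes]  # keep axes[model_idx][col] indexing valid for a single row

for model_idx, (model_name, model) in enumerate(models.items()):
    model.fit(X_train, y_train)
    # Prefer margin scores; otherwise fall back to positive-class probability
    if hasattr(model, "decision_function"):
        y_score = model.decision_function(X_test)
    else:
        y_score = model.predict_proba(X_test)[:, 1]
    fpr, tpr, _ = roc_curve(y_test, y_score, pos_label=model.classes_[1])
    RocCurveDisplay(fpr=fpr, tpr=tpr).plot(ax=axes[model_idx][0])
    precision, recall, _ = precision_recall_curve(y_test, y_score,
                                                  pos_label=model.classes_[1])
    PrecisionRecallDisplay(precision=precision, recall=recall).plot(ax=axes[model_idx][1])
    axes[model_idx][0].set_title(f"ROC Curve for {model_name}")
    axes[model_idx][1].set_title(f"PR Curve for {model_name}")
    for score_name, scorer in scorings.items():
        scores_df.at[model_name, score_name] = scorer(model, X_test, y_test)

plt.tight_layout()
plt.savefig("roc_pr_curves_sketch.svg", format="svg")  # illustrative output path

Because model.fit now runs unconditionally, every freshly initialized model is trained and evaluated, which matches the commit message "Ran for all models".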