From 96456eed27a77abb69cead1f4d9c483d45fdaf61 Mon Sep 17 00:00:00 2001
From: Eduardo Blancas
Date: Wed, 28 Feb 2024 07:59:09 -0600
Subject: [PATCH] lint

---
 src/sklearn_evaluation/plot/precision_recall.py | 2 +-
 src/sklearn_evaluation/plot/regression.py       | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/sklearn_evaluation/plot/precision_recall.py b/src/sklearn_evaluation/plot/precision_recall.py
index 98fb33cd..ce70e3cf 100644
--- a/src/sklearn_evaluation/plot/precision_recall.py
+++ b/src/sklearn_evaluation/plot/precision_recall.py
@@ -317,7 +317,7 @@ def plot(self, ax=None):
             # User passes list of list for binary
             r = r[0] if isinstance(r[0], (list, np.ndarray)) else r
             p = p[0] if isinstance(p[0], (list, np.ndarray)) else p
-            label = f"{l}, AUC={auc(r,p):.2f}"
+            label = f"{l}, AUC={auc(r, p):.2f}"
             ax = _plot_metrics_binary(p, r, label, ax)
 
         _set_ax_settings(ax, "Precision Recall compare")
diff --git a/src/sklearn_evaluation/plot/regression.py b/src/sklearn_evaluation/plot/regression.py
index ccda6ce9..c31c98a4 100644
--- a/src/sklearn_evaluation/plot/regression.py
+++ b/src/sklearn_evaluation/plot/regression.py
@@ -149,7 +149,7 @@ def prediction_error(y_true, y_pred, ax=None):
     # R2
     r2 = model.score(y_reshaped, y_pred)
 
-    plt.plot([], [], " ", label=f"R2 = {round(r2,5)}")
+    plt.plot([], [], " ", label=f"R2 = {round(r2, 5)}")
 
     _set_ax_settings(ax, "y_true", "y_pred", "Prediction Error")
     ax.legend(loc="upper left")