Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 3f990bc

Browse files
MAINT Parameters validation for metrics.precision_recall_curve (#25698)
Co-authored-by: jeremie du boisberranger <[email protected]>
1 parent 5bb8545 commit 3f990bc

File tree

2 files changed

+11
-2
lines changed

2 files changed

+11
-2
lines changed

sklearn/metrics/_ranking.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -814,6 +814,14 @@ def _binary_clf_curve(y_true, y_score, pos_label=None, sample_weight=None):
814814
return fps, tps, y_score[threshold_idxs]
815815

816816

817+
@validate_params(
818+
{
819+
"y_true": ["array-like"],
820+
"probas_pred": ["array-like"],
821+
"pos_label": [Real, str, "boolean", None],
822+
"sample_weight": ["array-like", None],
823+
}
824+
)
817825
def precision_recall_curve(y_true, probas_pred, *, pos_label=None, sample_weight=None):
818826
"""Compute precision-recall pairs for different probability thresholds.
819827
@@ -839,11 +847,11 @@ def precision_recall_curve(y_true, probas_pred, *, pos_label=None, sample_weight
839847
840848
Parameters
841849
----------
842-
y_true : ndarray of shape (n_samples,)
850+
y_true : array-like of shape (n_samples,)
843851
True binary labels. If labels are not either {-1, 1} or {0, 1}, then
844852
pos_label should be explicitly given.
845853
846-
probas_pred : ndarray of shape (n_samples,)
854+
probas_pred : array-like of shape (n_samples,)
847855
Target scores, can either be probability estimates of the positive
848856
class, or non-thresholded measure of decisions (as returned by
849857
`decision_function` on some classifiers).

sklearn/tests/test_public_functions.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -135,6 +135,7 @@ def _check_function_param_validation(
135135
"sklearn.metrics.multilabel_confusion_matrix",
136136
"sklearn.metrics.mutual_info_score",
137137
"sklearn.metrics.pairwise.additive_chi2_kernel",
138+
"sklearn.metrics.precision_recall_curve",
138139
"sklearn.metrics.precision_recall_fscore_support",
139140
"sklearn.metrics.r2_score",
140141
"sklearn.metrics.roc_curve",

0 commit comments

Comments (0)