Thanks to visit codestin.com
Credit goes to github.com

Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions doc/whats_new/v1.2.rst
Original file line number Diff line number Diff line change
Expand Up @@ -232,6 +232,10 @@ Changelog
of a binary classification problem. :pr:`22518` by
:user:`Arturo Amor <ArturoAmorQ>`.

- |Fix| Allow `csr_matrix` as input for the `y_true` parameter of
  the :func:`metrics.label_ranking_average_precision_score` metric.
  :pr:`23442` by :user:`Sean Atukorala <ShehanAT>`.

- |Fix| :func:`metrics.ndcg_score` will now trigger a warning when the `y_true`
value contains a negative value. Users may still use negative values, but the
result may not be between 0 and 1. Starting in v1.4, passing in negative
Expand Down
8 changes: 5 additions & 3 deletions sklearn/metrics/_ranking.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
from functools import partial

import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse import csr_matrix, issparse
from scipy.stats import rankdata

from ..utils import assert_all_finite
Expand Down Expand Up @@ -1071,7 +1071,7 @@ def label_ranking_average_precision_score(y_true, y_score, *, sample_weight=None
0.416...
"""
check_consistent_length(y_true, y_score, sample_weight)
y_true = check_array(y_true, ensure_2d=False)
y_true = check_array(y_true, ensure_2d=False, accept_sparse="csr")
y_score = check_array(y_score, ensure_2d=False)

if y_true.shape != y_score.shape:
Expand All @@ -1084,7 +1084,9 @@ def label_ranking_average_precision_score(y_true, y_score, *, sample_weight=None
):
raise ValueError("{0} format is not supported".format(y_type))

y_true = csr_matrix(y_true)
if not issparse(y_true):
y_true = csr_matrix(y_true)

y_score = -y_score

n_samples, n_labels = y_true.shape
Expand Down
9 changes: 9 additions & 0 deletions sklearn/metrics/tests/test_ranking.py
Original file line number Diff line number Diff line change
Expand Up @@ -2042,3 +2042,12 @@ def test_top_k_accuracy_score_warning(y_true, k):
def test_top_k_accuracy_score_error(y_true, y_score, labels, msg):
with pytest.raises(ValueError, match=msg):
top_k_accuracy_score(y_true, y_score, k=2, labels=labels)


def test_label_ranking_avg_precision_score_should_allow_csr_matrix_for_y_true_input():
    # label_ranking_average_precision_score accepts a sparse (CSR) y_true.
    # Non-regression test for #22575
    relevance = csr_matrix([[1, 0, 0], [0, 0, 1]])
    scores = np.array([[0.5, 0.9, 0.6], [0, 0, 1]])
    # Sample 1: the relevant label has the 3rd highest score -> precision 1/3;
    # sample 2: the relevant label ranks first -> precision 1; mean is 2/3.
    computed = label_ranking_average_precision_score(relevance, scores)
    assert computed == pytest.approx(2 / 3)