
Commit 2480f5a

tomasrhughes, thomasjpfan, cmaureir, and glemaitre authored
DOC Ensures that K/RadiusNeighborsRegressor passes numpydoc validation (#20378)
Co-authored-by: Thomas J. Fan <[email protected]>
Co-authored-by: Cristián Maureira-Fredes <[email protected]>
Co-authored-by: Guillaume Lemaitre <[email protected]>
1 parent ac59623 commit 2480f5a

3 files changed: +32 additions, -32 deletions

maint_tools/test_docstrings.py

Lines changed: 0 additions & 1 deletion
@@ -106,7 +106,6 @@
     "RBFSampler",
     "RFE",
     "RadiusNeighborsClassifier",
-    "RadiusNeighborsRegressor",
     "RadiusNeighborsTransformer",
     "RandomTreesEmbedding",
     "RandomizedSearchCV",

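Removing "RadiusNeighborsRegressor" from this ignore list is what turns the numpydoc check on for the estimator. As a rough, illustrative sketch (not part of the commit, and assuming numpydoc >= 1.1 is installed alongside scikit-learn), the same validation can be reproduced directly with numpydoc's validate helper, which is roughly what maint_tools/test_docstrings.py does for every public estimator not on the ignore list:

# Hedged sketch: reproduce the numpydoc check for one estimator's docstring.
# Assumes numpydoc (>= 1.1) and scikit-learn are importable; the exact set of
# error codes reported can vary with the numpydoc version.
from numpydoc.validate import validate

report = validate("sklearn.neighbors.RadiusNeighborsRegressor")

# report["errors"] is a list of (code, message) tuples; an empty list means
# the class-level docstring passes validation.
for code, message in report["errors"]:
    print(code, message)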
sklearn/neighbors/_base.py

Lines changed: 14 additions & 13 deletions
@@ -1,4 +1,4 @@
-"""Base and mixin classes for nearest neighbors"""
+"""Base and mixin classes for nearest neighbors."""
 # Authors: Jake Vanderplas <[email protected]>
 #          Fabian Pedregosa <[email protected]>
 #          Alexandre Gramfort <[email protected]>
@@ -88,7 +88,7 @@ def _check_weights(weights):
 
 
 def _get_weights(dist, weights):
-    """Get the weights from an array of distances and a parameter ``weights``
+    """Get the weights from an array of distances and a parameter ``weights``.
 
     Parameters
     ----------
@@ -135,7 +135,7 @@ def _get_weights(dist, weights):
 
 
 def _is_sorted_by_data(graph):
-    """Returns whether the graph's non-zero entries are sorted by data
+    """Return whether the graph's non-zero entries are sorted by data.
 
     The non-zero entries are stored in graph.data and graph.indices.
     For each row (or sample), the non-zero entries can be either:
@@ -162,7 +162,7 @@ def _is_sorted_by_data(graph):
 
 
 def _check_precomputed(X):
-    """Check precomputed distance matrix
+    """Check precomputed distance matrix.
 
     If the precomputed distance matrix is sparse, it checks that the non-zero
     entries are sorted by distances. If not, the matrix is copied and sorted.
@@ -223,7 +223,7 @@ def _check_precomputed(X):
 
 
 def _kneighbors_from_graph(graph, n_neighbors, return_distance):
-    """Decompose a nearest neighbors sparse graph into distances and indices
+    """Decompose a nearest neighbors sparse graph into distances and indices.
 
     Parameters
     ----------
@@ -275,7 +275,7 @@ def extract(a):
 
 
 def _radius_neighbors_from_graph(graph, radius, return_distance):
-    """Decompose a nearest neighbors sparse graph into distances and indices
+    """Decompose a nearest neighbors sparse graph into distances and indices.
 
     Parameters
     ----------
@@ -589,7 +589,7 @@ def _pairwise(self):
 
 
 def _tree_query_parallel_helper(tree, *args, **kwargs):
-    """Helper for the Parallel calls in KNeighborsMixin.kneighbors
+    """Helper for the Parallel calls in KNeighborsMixin.kneighbors.
 
     The Cython method tree.query is not directly picklable by cloudpickle
     under PyPy.
@@ -598,10 +598,10 @@ def _tree_query_parallel_helper(tree, *args, **kwargs):
 
 
 class KNeighborsMixin:
-    """Mixin for k-neighbors searches"""
+    """Mixin for k-neighbors searches."""
 
     def _kneighbors_reduce_func(self, dist, start, n_neighbors, return_distance):
-        """Reduce a chunk of distances to the nearest neighbors
+        """Reduce a chunk of distances to the nearest neighbors.
 
         Callback to :func:`sklearn.metrics.pairwise.pairwise_distances_chunked`
 
@@ -851,7 +851,8 @@ def kneighbors_graph(self, X=None, n_neighbors=None, mode="connectivity"):
 
         See Also
         --------
-        NearestNeighbors.radius_neighbors_graph: Computes a graph of neighbors.
+        NearestNeighbors.radius_neighbors_graph : Compute the (weighted) graph
+            of Neighbors for points in X.
 
         Examples
         --------
@@ -901,7 +902,7 @@ def kneighbors_graph(self, X=None, n_neighbors=None, mode="connectivity"):
 
 
 def _tree_query_radius_parallel_helper(tree, *args, **kwargs):
-    """Helper for the Parallel calls in RadiusNeighborsMixin.radius_neighbors
+    """Helper for the Parallel calls in RadiusNeighborsMixin.radius_neighbors.
 
     The Cython method tree.query_radius is not directly picklable by
     cloudpickle under PyPy.
@@ -910,10 +911,10 @@ def _tree_query_radius_parallel_helper(tree, *args, **kwargs):
 
 
 class RadiusNeighborsMixin:
-    """Mixin for radius-based neighbors searches"""
+    """Mixin for radius-based neighbors searches."""
 
     def _radius_neighbors_reduce_func(self, dist, start, radius, return_distance):
-        """Reduce a chunk of distances to the nearest neighbors
+        """Reduce a chunk of distances to the nearest neighbors.
 
         Callback to :func:`sklearn.metrics.pairwise.pairwise_distances_chunked`
 
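For readers less familiar with the two graph constructors that the expanded See Also entry cross-references, here is a small usage sketch (illustrative only, not part of the diff) built on the public scikit-learn API:

# Hedged sketch (not part of this commit): the two graph methods that the
# updated See Also entry cross-references.
import numpy as np
from sklearn.neighbors import NearestNeighbors

X = np.array([[0.0], [1.0], [2.0], [3.0]])

neigh = NearestNeighbors(n_neighbors=2, radius=1.5).fit(X)

# KNeighborsMixin.kneighbors_graph: sparse matrix with a fixed number of
# neighbors per sample.
knn_graph = neigh.kneighbors_graph(X, mode="connectivity")

# RadiusNeighborsMixin.radius_neighbors_graph: the (weighted) graph of
# neighbors within `radius` of each point, as the new docstring wording says.
rnn_graph = neigh.radius_neighbors_graph(X, mode="distance")

print(knn_graph.toarray())
print(rnn_graph.toarray())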

sklearn/neighbors/_regression.py

Lines changed: 18 additions & 18 deletions
@@ -1,4 +1,4 @@
-"""Nearest Neighbor Regression"""
+"""Nearest Neighbor Regression."""
 
 # Authors: Jake Vanderplas <[email protected]>
 #          Fabian Pedregosa <[email protected]>
@@ -263,7 +263,7 @@ class RadiusNeighborsRegressor(RadiusNeighborsMixin, RegressorMixin, NeighborsBa
         queries.
 
     weights : {'uniform', 'distance'} or callable, default='uniform'
-        weight function used in prediction. Possible values:
+        Weight function used in prediction. Possible values:
 
         - 'uniform' : uniform weights. All points in each neighborhood
           are weighted equally.
@@ -300,7 +300,7 @@ class RadiusNeighborsRegressor(RadiusNeighborsMixin, RegressorMixin, NeighborsBa
         (l2) for p = 2. For arbitrary p, minkowski_distance (l_p) is used.
 
     metric : str or callable, default='minkowski'
-        the distance metric to use for the tree. The default metric is
+        The distance metric to use for the tree. The default metric is
         minkowski, and with p=2 is equivalent to the standard Euclidean
         metric. See the documentation of :class:`DistanceMetric` for a
         list of available metrics.
@@ -338,6 +338,20 @@ class RadiusNeighborsRegressor(RadiusNeighborsMixin, RegressorMixin, NeighborsBa
     n_samples_fit_ : int
         Number of samples in the fitted data.
 
+    See Also
+    --------
+    NearestNeighbors : Regression based on nearest neighbors.
+    KNeighborsRegressor : Regression based on k-nearest neighbors.
+    KNeighborsClassifier : Classifier based on the k-nearest neighbors.
+    RadiusNeighborsClassifier : Classifier based on neighbors within a given radius.
+
+    Notes
+    -----
+    See :ref:`Nearest Neighbors <neighbors>` in the online documentation
+    for a discussion of the choice of ``algorithm`` and ``leaf_size``.
+
+    https://en.wikipedia.org/wiki/K-nearest_neighbor_algorithm
+
     Examples
     --------
     >>> X = [[0], [1], [2], [3]]
@@ -348,20 +362,6 @@ class RadiusNeighborsRegressor(RadiusNeighborsMixin, RegressorMixin, NeighborsBa
     RadiusNeighborsRegressor(...)
     >>> print(neigh.predict([[1.5]]))
     [0.5]
-
-    See Also
-    --------
-    NearestNeighbors
-    KNeighborsRegressor
-    KNeighborsClassifier
-    RadiusNeighborsClassifier
-
-    Notes
-    -----
-    See :ref:`Nearest Neighbors <neighbors>` in the online documentation
-    for a discussion of the choice of ``algorithm`` and ``leaf_size``.
-
-    https://en.wikipedia.org/wiki/K-nearest_neighbor_algorithm
     """
 
     def __init__(
@@ -410,7 +410,7 @@ def fit(self, X, y):
         return self._fit(X, y)
 
     def predict(self, X):
-        """Predict the target for the provided data
+        """Predict the target for the provided data.
 
         Parameters
        ----------
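The relocated docstring keeps the original doctest unchanged. Purely as an illustration (not part of the commit), the sketch below extends that example with the non-default weights option documented in the parameters section above:

# Hedged sketch extending the docstring's own example with distance weighting.
from sklearn.neighbors import RadiusNeighborsRegressor

X = [[0], [1], [2], [3]]
y = [0, 0, 1, 1]

# Neighbors within radius 1.0 contribute in inverse proportion to their
# distance from the query point instead of uniformly.
neigh = RadiusNeighborsRegressor(radius=1.0, weights="distance")
neigh.fit(X, y)

print(neigh.predict([[1.5]]))  # averages the targets of the in-radius neighbors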
