MAINT,BUG,DOC: Fix errors in _add_newdocs #13876

Merged: merged 5 commits on Jul 6, 2019
242 changes: 41 additions & 201 deletions numpy/core/_add_newdocs.py
@@ -10,6 +10,8 @@
 """
 from __future__ import division, absolute_import, print_function
 
+import sys
+
 from numpy.core import numerictypes as _numerictypes
 from numpy.core import dtype
 from numpy.core.function_base import add_newdoc
@@ -1461,57 +1463,58 @@ def luf(lamdaexpr, *args, **kwargs):
 
     """)
 
-add_newdoc('numpy.core.multiarray', 'newbuffer',
-    """
-    newbuffer(size)
-
-    Return a new uninitialized buffer object.
-
-    Parameters
-    ----------
-    size : int
-        Size in bytes of returned buffer object.
-
-    Returns
-    -------
-    newbuffer : buffer object
-        Returned, uninitialized buffer object of `size` bytes.
-
-    """)
-
-add_newdoc('numpy.core.multiarray', 'getbuffer',
-    """
-    getbuffer(obj [,offset[, size]])
-
-    Create a buffer object from the given object referencing a slice of
-    length size starting at offset.
-
-    Default is the entire buffer. A read-write buffer is attempted followed
-    by a read-only buffer.
-
-    Parameters
-    ----------
-    obj : object
-
-    offset : int, optional
-
-    size : int, optional
-
-    Returns
-    -------
-    buffer_obj : buffer
-
-    Examples
-    --------
-    >>> buf = np.getbuffer(np.ones(5), 1, 3)
-    >>> len(buf)
-    3
-    >>> buf[0]
-    '\\x00'
-    >>> buf
-    <read-write buffer for 0x8af1e70, size 3, offset 1 at 0x8ba4ec0>
-
-    """)
+if sys.version_info.major < 3:
+    add_newdoc('numpy.core.multiarray', 'newbuffer',
+        """
+        newbuffer(size)
+
+        Return a new uninitialized buffer object.
+
+        Parameters
+        ----------
+        size : int
+            Size in bytes of returned buffer object.
+
+        Returns
+        -------
+        newbuffer : buffer object
+            Returned, uninitialized buffer object of `size` bytes.
+
+        """)
+
+    add_newdoc('numpy.core.multiarray', 'getbuffer',
+        """
+        getbuffer(obj [,offset[, size]])
+
+        Create a buffer object from the given object referencing a slice of
+        length size starting at offset.
+
+        Default is the entire buffer. A read-write buffer is attempted followed
+        by a read-only buffer.
+
+        Parameters
+        ----------
+        obj : object
+
+        offset : int, optional
+
+        size : int, optional
+
+        Returns
+        -------
+        buffer_obj : buffer
+
+        Examples
+        --------
+        >>> buf = np.getbuffer(np.ones(5), 1, 3)
+        >>> len(buf)
+        3
+        >>> buf[0]
+        '\\x00'
+        >>> buf
+        <read-write buffer for 0x8af1e70, size 3, offset 1 at 0x8ba4ec0>
+
+        """)
 
 add_newdoc('numpy.core.multiarray', 'c_einsum',
     """
@@ -1977,13 +1980,6 @@ def luf(lamdaexpr, *args, **kwargs):
"""Array protocol: C-struct side."""))


add_newdoc('numpy.core.multiarray', 'ndarray', ('_as_parameter_',
"""Allow the array to be interpreted as a ctypes object by returning the
data-memory location as an integer

"""))

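(Not part of the diff: the block removed above documented an `_as_parameter_` attribute that ndarray does not actually have, which is presumably why it is dropped; with the stricter add_newdoc this entry would now raise. A rough sketch, assuming current numpy, of where the ctypes hook really lives:)

    import ctypes
    import numpy as np

    a = np.arange(3, dtype=np.float64)
    print(hasattr(a, '_as_parameter_'))        # False: ndarray has no such attribute
    print(a.ctypes.data)                       # integer address of the data buffer
    print(isinstance(a.ctypes._as_parameter_, ctypes.c_void_p))  # True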

add_newdoc('numpy.core.multiarray', 'ndarray', ('base',
"""
Base object if memory is from some other object.
@@ -3231,87 +3227,6 @@ def luf(lamdaexpr, *args, **kwargs):
"""))


add_newdoc('numpy.core.multiarray', 'shares_memory',
"""
shares_memory(a, b, max_work=None)

Determine if two arrays share memory

Parameters
----------
a, b : ndarray
Input arrays
max_work : int, optional
Effort to spend on solving the overlap problem (maximum number
of candidate solutions to consider). The following special
values are recognized:

max_work=MAY_SHARE_EXACT (default)
The problem is solved exactly. In this case, the function returns
True only if there is an element shared between the arrays.
max_work=MAY_SHARE_BOUNDS
Only the memory bounds of a and b are checked.

Raises
------
numpy.TooHardError
Exceeded max_work.

Returns
-------
out : bool

See Also
--------
may_share_memory

Examples
--------
>>> np.may_share_memory(np.array([1,2]), np.array([5,8,9]))
False

""")


add_newdoc('numpy.core.multiarray', 'may_share_memory',
"""
may_share_memory(a, b, max_work=None)

Determine if two arrays might share memory

A return of True does not necessarily mean that the two arrays
share any element. It just means that they *might*.

Only the memory bounds of a and b are checked by default.

Parameters
----------
a, b : ndarray
Input arrays
max_work : int, optional
Effort to spend on solving the overlap problem. See
`shares_memory` for details. Default for ``may_share_memory``
is to do a bounds check.

Returns
-------
out : bool

See Also
--------
shares_memory

Examples
--------
>>> np.may_share_memory(np.array([1,2]), np.array([5,8,9]))
False
>>> x = np.zeros([3, 4])
>>> np.may_share_memory(x[:,0], x[:,1])
True

""")

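(Not part of the diff: a short illustration of the exact check versus the cheap bounds check that the two docstrings above describe; the functions themselves are unaffected by this removal.)

    import numpy as np

    x = np.zeros([3, 4])
    # The two columns interleave in memory: their bounds overlap, but no element is shared.
    print(np.may_share_memory(x[:, 0], x[:, 1]))   # True  (bounds check only)
    print(np.shares_memory(x[:, 0], x[:, 1]))      # False (exact check, MAY_SHARE_EXACT)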

add_newdoc('numpy.core.multiarray', 'ndarray', ('newbyteorder',
"""
arr.newbyteorder(new_order='S')
@@ -3413,81 +3328,6 @@ def luf(lamdaexpr, *args, **kwargs):

"""))

add_newdoc('numpy.core.multiarray', 'copyto',
"""
copyto(dst, src, casting='same_kind', where=True)

Copies values from one array to another, broadcasting as necessary.

Raises a TypeError if the `casting` rule is violated, and if
`where` is provided, it selects which elements to copy.

.. versionadded:: 1.7.0

Parameters
----------
dst : ndarray
The array into which values are copied.
src : array_like
The array from which values are copied.
casting : {'no', 'equiv', 'safe', 'same_kind', 'unsafe'}, optional
Controls what kind of data casting may occur when copying.

* 'no' means the data types should not be cast at all.
* 'equiv' means only byte-order changes are allowed.
* 'safe' means only casts which can preserve values are allowed.
* 'same_kind' means only safe casts or casts within a kind,
like float64 to float32, are allowed.
* 'unsafe' means any data conversions may be done.
where : array_like of bool, optional
A boolean array which is broadcasted to match the dimensions
of `dst`, and selects elements to copy from `src` to `dst`
wherever it contains the value True.

""")

add_newdoc('numpy.core.multiarray', 'putmask',
"""
putmask(a, mask, values)

Changes elements of an array based on conditional and input values.

Sets ``a.flat[n] = values[n]`` for each n where ``mask.flat[n]==True``.

If `values` is not the same size as `a` and `mask` then it will repeat.
This gives behavior different from ``a[mask] = values``.

Parameters
----------
a : array_like
Target array.
mask : array_like
Boolean mask array. It has to be the same shape as `a`.
values : array_like
Values to put into `a` where `mask` is True. If `values` is smaller
than `a` it will be repeated.

See Also
--------
place, put, take, copyto

Examples
--------
>>> x = np.arange(6).reshape(2, 3)
>>> np.putmask(x, x>2, x**2)
>>> x
array([[ 0, 1, 2],
[ 9, 16, 25]])

If `values` is smaller than `a` it is repeated:

>>> x = np.arange(5)
>>> np.putmask(x, x>1, [-33, -44])
>>> x
array([ 0, 1, -33, -44, -33])

""")


add_newdoc('numpy.core.multiarray', 'ndarray', ('ravel',
"""
29 changes: 17 additions & 12 deletions numpy/core/function_base.py
@@ -428,6 +428,13 @@ def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0):
 
 
 #always succeed
+def _add_docstring(obj, doc):
+    try:
+        add_docstring(obj, doc)
+    except Exception:
+        pass
+
+
 def add_newdoc(place, obj, doc):
     """
     Adds documentation to obj which is in module place.
@@ -442,21 +449,19 @@ def add_newdoc(place, obj, doc):
        sequence of length two --> [(method1, docstring1),
        (method2, docstring2), ...]
 
-    This routine never raises an error.
+    This routine never raises an error if the docstring can't be written, but
+    will raise an error if the object being documented does not exist.
 
     This routine cannot modify read-only docstrings, as appear
     in new-style classes or built-in functions. Because this
     routine never raises an error the caller must check manually
     that the docstrings were changed.
     """
-    try:
-        new = getattr(__import__(place, globals(), {}, [obj]), obj)
-        if isinstance(doc, str):
-            add_docstring(new, doc.strip())
-        elif isinstance(doc, tuple):
-            add_docstring(getattr(new, doc[0]), doc[1].strip())
-        elif isinstance(doc, list):
-            for val in doc:
-                add_docstring(getattr(new, val[0]), val[1].strip())
-    except Exception:
-        pass
+    new = getattr(__import__(place, globals(), {}, [obj]), obj)
+    if isinstance(doc, str):
+        _add_docstring(new, doc.strip())
+    elif isinstance(doc, tuple):
+        _add_docstring(getattr(new, doc[0]), doc[1].strip())
+    elif isinstance(doc, list):
+        for val in doc:
+            _add_docstring(getattr(new, val[0]), val[1].strip())
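(Not part of the diff: a sketch of the three `doc` forms that add_newdoc accepts, and of the new failure mode. With this change a misspelled object name raises at import time, while a docstring that cannot actually be written, for example on a builtin that already has one, is still skipped silently via _add_docstring.)

    from numpy.core.function_base import add_newdoc

    # string: document the object itself
    add_newdoc('numpy.core.multiarray', 'ndarray', "Illustrative docstring.")

    # tuple: document a single attribute or method of the object
    add_newdoc('numpy.core.multiarray', 'ndarray', ('transpose', "Illustrative docstring."))

    # list of tuples: several attributes at once
    add_newdoc('numpy.core.multiarray', 'ndarray',
               [('ravel', "Illustrative docstring."),
                ('reshape', "Illustrative docstring.")])

    # A typo in the object name now fails loudly instead of passing silently.
    try:
        add_newdoc('numpy.core.multiarray', 'no_such_object', "...")
    except AttributeError as exc:
        print(exc)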
9 changes: 6 additions & 3 deletions numpy/core/multiarray.py
@@ -7,6 +7,7 @@
 """
 
 import functools
+import sys
 import warnings
 
 from . import overrides
@@ -15,7 +16,7 @@
 from numpy.core._multiarray_umath import *
 from numpy.core._multiarray_umath import (
     _fastCopyAndTranspose, _flagdict, _insert, _reconstruct, _vec_string,
-    _ARRAY_API, _monotonicity
+    _ARRAY_API, _monotonicity, _get_ndarray_c_version
Member Author:

Fixes pymc-devs/pymc#3340, sympy/sympy#16556, and probably some others.

Member:

The name begins with a '_', shouldn't we be discouraging its use rather than adding more private methods to the public API?

Member Author:

Perhaps, but removing it from the API broke downstream packages, and I don't think it was a deliberate choice.

Also, this member is even documented, so I think it is intended to be "public but only for library use" or something.

)

__all__ = [
@@ -30,15 +31,17 @@
     'count_nonzero', 'c_einsum', 'datetime_as_string', 'datetime_data',
     'digitize', 'dot', 'dragon4_positional', 'dragon4_scientific', 'dtype',
     'empty', 'empty_like', 'error', 'flagsobj', 'flatiter', 'format_longfloat',
-    'frombuffer', 'fromfile', 'fromiter', 'fromstring', 'getbuffer', 'inner',
+    'frombuffer', 'fromfile', 'fromiter', 'fromstring', 'inner',
     'int_asbuffer', 'interp', 'interp_complex', 'is_busday', 'lexsort',
     'matmul', 'may_share_memory', 'min_scalar_type', 'ndarray', 'nditer',
-    'nested_iters', 'newbuffer', 'normalize_axis_index', 'packbits',
+    'nested_iters', 'normalize_axis_index', 'packbits',
     'promote_types', 'putmask', 'ravel_multi_index', 'result_type', 'scalar',
     'set_datetimeparse_function', 'set_legacy_print_mode', 'set_numeric_ops',
     'set_string_function', 'set_typeDict', 'shares_memory', 'test_interrupt',
     'tracemalloc_domain', 'typeinfo', 'unpackbits', 'unravel_index', 'vdot',
     'where', 'zeros']
+if sys.version_info.major < 3:
+    __all__ += ['newbuffer', 'getbuffer']
 
 
 arange.__module__ = 'numpy'
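(Not part of the diff: a hedged sketch of the downstream usage the re-exported name restores. The linked reports reportedly trace back, via Theano, to code that looks this helper up on numpy.core.multiarray to compare the compiled ndarray C-API version at import time; the exact call sites are an assumption here.)

    import numpy.core.multiarray as ma

    # Returns the compile-time ndarray C version as an integer; the exact
    # value depends on the numpy release.
    print(ma._get_ndarray_c_version())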