Skip to content

Commit

Permalink
BUG: Ignore fewer errors during array-coercion
Browse files Browse the repository at this point in the history
This changes it so that we only ignore attribute errors on
looking up `__array__` and propagate errors when checking
for sequences `len(obj)` if those errors are either
RecursionError or MemoryError (we consider them unrecoverable).

Also adds test for bad recursive array-like with sequence
as reported in gh-17785. The test might be flaky/more complicated,
in which case it should probably just be deleted.
  • Loading branch information
seberg committed Nov 24, 2020
1 parent b88b2c0 commit b17dd63
Show file tree
Hide file tree
Showing 3 changed files with 48 additions and 4 deletions.
18 changes: 16 additions & 2 deletions numpy/core/src/multiarray/array_coercion.c
Expand Up @@ -979,14 +979,28 @@ PyArray_DiscoverDTypeAndShape_Recursive(
* and to handle it recursively. That is, unless we have hit the
* dimension limit.
*/
npy_bool is_sequence = (PySequence_Check(obj) && PySequence_Size(obj) >= 0);
npy_bool is_sequence = PySequence_Check(obj);
if (is_sequence) {
is_sequence = PySequence_Size(obj) >= 0;
if (NPY_UNLIKELY(!is_sequence)) {
/* NOTE: This should likely just raise all errors */
if (PyErr_ExceptionMatches(PyExc_RecursionError) ||
PyErr_ExceptionMatches(PyExc_MemoryError)) {
/*
* Consider these unrecoverable errors, continuing execution
* might crash the interpreter.
*/
return -1;
}
PyErr_Clear();
}
}
if (NPY_UNLIKELY(*flags & DISCOVER_TUPLES_AS_ELEMENTS) &&
PyTuple_Check(obj)) {
is_sequence = NPY_FALSE;
}
if (curr_dims == max_dims || !is_sequence) {
/* Clear any PySequence_Size error which would corrupt further calls */
PyErr_Clear();
max_dims = handle_scalar(
obj, curr_dims, &max_dims, out_descr, out_shape, fixed_DType,
flags, NULL);
Expand Down
4 changes: 2 additions & 2 deletions numpy/core/src/multiarray/ctors.c
Expand Up @@ -2122,7 +2122,7 @@ PyArray_FromInterface(PyObject *origin)

if (iface == NULL) {
if (PyErr_Occurred()) {
PyErr_Clear(); /* TODO[gh-14801]: propagate crashes during attribute access? */
return NULL;
}
return Py_NotImplemented;
}
Expand Down Expand Up @@ -2390,7 +2390,7 @@ PyArray_FromArrayAttr(PyObject *op, PyArray_Descr *typecode, PyObject *context)
array_meth = PyArray_LookupSpecial_OnInstance(op, "__array__");
if (array_meth == NULL) {
if (PyErr_Occurred()) {
PyErr_Clear(); /* TODO[gh-14801]: propagate crashes during attribute access? */
return NULL;
}
return Py_NotImplemented;
}
Expand Down
30 changes: 30 additions & 0 deletions numpy/core/tests/test_array_coercion.py
Expand Up @@ -689,3 +689,33 @@ def test_too_large_array_error_paths(self):
np.array(arr)
with pytest.raises(MemoryError):
np.array([arr])

@pytest.mark.parametrize("attribute",
["__array_interface__", "__array__", "__array_struct__"])
def test_bad_array_like_attributes(self, attribute):
# Check that errors during attribute retrieval are raised unless
# they are Attribute errors.

class BadInterface:
def __getattr__(self, attr):
if attr == attribute:
raise RuntimeError
super().__getattr__(attr)

with pytest.raises(RuntimeError):
np.array(BadInterface())

@pytest.mark.parametrize("error", [RecursionError, MemoryError])
def test_bad_array_like_bad_length(self, error):
# RecursionError and MemoryError are considered "critical" in
# sequences. We could expand this more generally though. (NumPy 1.20)
class BadSequence:
def __len__(self):
raise error
def __getitem__(self):
# must have getitem to be a Sequence
return 1

with pytest.raises(error):
np.array(BadSequence())

0 comments on commit b17dd63

Please sign in to comment.