Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Small cleanups, add a DataTree pretty printer #3957

Merged
merged 8 commits into from Apr 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
3 changes: 3 additions & 0 deletions hypothesis-python/src/RELEASE.rst
@@ -0,0 +1,3 @@
RELEASE_TYPE: patch

This patch cleans up some internal code.
Expand Up @@ -1977,6 +1977,7 @@ def __init__(
self.extra_information = ExtraInformation()

self.ir_tree_nodes = ir_tree_prefix
self._node_index = 0
self.start_example(TOP_LABEL)

def __repr__(self):
Expand Down Expand Up @@ -2274,10 +2275,11 @@ def _pooled_kwargs(self, ir_type, kwargs):
def _pop_ir_tree_node(self, ir_type: IRTypeName, kwargs: IRKWargsType) -> IRNode:
assert self.ir_tree_nodes is not None

if self.ir_tree_nodes == []:
if self._node_index == len(self.ir_tree_nodes):
self.mark_overrun()

node = self.ir_tree_nodes.pop(0)
node = self.ir_tree_nodes[self._node_index]
self._node_index += 1
# If we're trying to draw a different ir type at the same location, then
# this ir tree has become badly misaligned. We don't have many good/simple
# options here for realigning beyond giving up.
Expand Down
Expand Up @@ -63,6 +63,15 @@ class Killed:

next_node = attr.ib()

def _repr_pretty_(self, p, cycle):
assert cycle is False
p.text("Killed")


def _node_pretty(ir_type, value, kwargs, *, forced):
forced_marker = " [forced]" if forced else ""
return f"{ir_type} {value}{forced_marker} {kwargs}"


@attr.s(slots=True)
class Branch:
Expand All @@ -79,6 +88,16 @@ def max_children(self):
assert max_children > 0
return max_children

def _repr_pretty_(self, p, cycle):
    """Print one line per child value, with each child's subtree nested two spaces deeper."""
    assert cycle is False
    first = True
    for value, child in self.children.items():
        # Separate sibling children with a line break (but not before the first).
        if not first:
            p.break_()
        first = False
        # Branch values are by definition not forced — a forced draw has one outcome.
        p.text(_node_pretty(self.ir_type, value, self.kwargs, forced=False))
        with p.indent(2):
            p.break_()
            p.pretty(child)


@attr.s(slots=True, frozen=True)
class Conclusion:
Expand All @@ -87,6 +106,15 @@ class Conclusion:
status: Status = attr.ib()
interesting_origin: Optional[InterestingOrigin] = attr.ib()

def _repr_pretty_(self, p, cycle):
assert cycle is False
o = self.interesting_origin
# avoid str(o), which can include multiple lines of context
origin = (
"" if o is None else f", {o.exc_type.__name__} at {o.filename}:{o.lineno}"
)
p.text(f"Conclusion ({self.status!r}{origin})")


# The number of max children where, beyond this, it is practically impossible
# for hypothesis to saturate / explore all children nodes in a reasonable time
Expand Down Expand Up @@ -493,6 +521,29 @@ def check_exhausted(self):
)
return self.is_exhausted

def _repr_pretty_(self, p, cycle):
    """Print this node's draws one per line, each two spaces deeper than the
    previous, followed by its transition (if any) at the final depth."""
    assert cycle is False
    depth = 0
    for i, (ir_type, kwargs, value) in enumerate(
        zip(self.ir_types, self.kwargs, self.values)
    ):
        with p.indent(depth):
            if i > 0:
                p.break_()
            p.text(_node_pretty(ir_type, value, kwargs, forced=i in self.forced))
        depth += 2

    transition = self.transition
    if isinstance(transition, Branch):
        # A Branch manages its own per-child indentation, so print it at
        # the current level.
        if self.values:
            p.break_()
        p.pretty(transition)
    elif isinstance(transition, (Killed, Conclusion)):
        with p.indent(depth):
            if self.values:
                p.break_()
            p.pretty(transition)


class DataTree:
"""
Expand Down Expand Up @@ -889,6 +940,10 @@ def _reject_child(self, ir_type, kwargs, *, child, key):
if child in children:
children.remove(child)

def _repr_pretty_(self, p, cycle):
assert cycle is False
return p.pretty(self.root)


class TreeRecordingObserver(DataObserver):
def __init__(self, tree):
Expand Down
Expand Up @@ -922,7 +922,7 @@ def reoffset(o):
new_blocks[i] = int_to_bytes(v + o, len(blocked[i]))
return self.incorporate_new_buffer(b"".join(new_blocks))

Integer.shrink(offset, reoffset, random=self.random)
Integer.shrink(offset, reoffset)
self.clear_change_tracking()

def clear_change_tracking(self):
Expand Down Expand Up @@ -1193,7 +1193,6 @@ def minimize_duplicated_blocks(self, chooser):
Lexical.shrink(
block,
lambda b: self.try_shrinking_blocks(targets, b),
random=self.random,
)

@defines_shrink_pass()
Expand Down Expand Up @@ -1236,7 +1235,6 @@ def minimize_floats(self, chooser):
+ [node.copy(with_value=sign * val)]
+ self.nodes[node.index + 1 :]
),
random=self.random,
node=node,
)

Expand Down Expand Up @@ -1362,7 +1360,6 @@ def minimize_individual_blocks(self, chooser):
Lexical.shrink(
self.shrink_target.buffer[u:v],
lambda b: self.try_shrinking_blocks((i,), b),
random=self.random,
)

if self.shrink_target is not initial:
Expand Down Expand Up @@ -1459,7 +1456,6 @@ def test_not_equal(x, y):
],
)
),
random=self.random,
key=lambda i: st.buffer[examples[i].start : examples[i].end],
)

Expand Down
Expand Up @@ -20,7 +20,6 @@ def __init__(
self,
initial,
predicate,
random,
*,
full=False,
debug=False,
Expand All @@ -30,7 +29,6 @@ def __init__(
self.setup(**kwargs)
self.current = self.make_immutable(initial)
self.initial = self.current
self.random = random
self.full = full
self.changes = 0
self.name = name
Expand Down Expand Up @@ -75,7 +73,7 @@ def call_shrinker(self, other_class, initial, predicate, **kwargs):
Note we explicitly do not pass through full.
"""

return other_class.shrink(initial, predicate, random=self.random, **kwargs)
return other_class.shrink(initial, predicate, **kwargs)

def debug(self, *args):
if self.debugging_enabled:
Expand Down Expand Up @@ -155,15 +153,14 @@ def check_invariants(self, value):

Does nothing by default.
"""
raise NotImplementedError

def short_circuit(self):
    """Possibly attempt to do some shrinking.

    If this returns True, the ``run`` method will terminate early
    without doing any more work.

    The default is to do nothing and let ``run`` proceed normally.
    """
    # Diff residue fix: the scrape contained both the removed
    # ``raise NotImplementedError`` and the added ``return False``;
    # the merged version makes "no short-circuit" the default.
    return False

def left_is_better(self, left, right):
"""Returns True if the left is strictly simpler than the right
Expand Down
Expand Up @@ -43,16 +43,10 @@ def minimize_as_integer(self):
Integer.shrink(
self.current_int,
lambda c: c == self.current_int or self.incorporate_int(c),
random=self.random,
)

def partial_sort(self):
    """Attempt to move the current collection closer to sorted order,
    subject to ``self.consider`` accepting each intermediate state."""
    # Diff residue fix: the hunk contained both the old call (with
    # ``random=self.random``) and the new one; keep the merged form.
    Ordering.shrink(self.current, self.consider)

def run_step(self):
self.minimize_as_integer()
Expand Down
44 changes: 44 additions & 0 deletions hypothesis-python/tests/conjecture/test_data_tree.py
Expand Up @@ -8,6 +8,7 @@
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.

import textwrap
from random import Random

import pytest
Expand All @@ -22,7 +23,9 @@
)
from hypothesis.internal.conjecture.engine import ConjectureRunner
from hypothesis.internal.conjecture.floats import float_to_int
from hypothesis.internal.escalation import InterestingOrigin
from hypothesis.internal.floats import next_up
from hypothesis.vendor import pretty

from tests.conjecture.common import (
draw_boolean_kwargs,
Expand Down Expand Up @@ -567,3 +570,44 @@ def buf(data):
prefix = tree.generate_novel_prefix(Random())
data = ConjectureData.for_buffer(prefix)
assert data.draw_float(min_value, max_value, allow_nan=False) == expected_value


@given(draw_boolean_kwargs(), draw_integer_kwargs())
def test_datatree_repr(bool_kwargs, int_kwargs):
    # Reconstructed: the scraped page interleaved review-comment UI text
    # inside the expected string; this is the coherent test without it.
    tree = DataTree()

    # Build a real InterestingOrigin from an actual raised exception.
    try:
        int("not an int")
    except ValueError as e:
        origin = InterestingOrigin.from_exception(e)

    observer = tree.new_observer()
    observer.draw_boolean(True, was_forced=False, kwargs=bool_kwargs)
    observer.conclude_test(Status.INVALID, interesting_origin=None)

    observer = tree.new_observer()
    observer.draw_boolean(False, was_forced=False, kwargs=bool_kwargs)
    observer.draw_integer(42, was_forced=False, kwargs=int_kwargs)
    observer.conclude_test(Status.VALID, interesting_origin=None)

    observer = tree.new_observer()
    observer.draw_boolean(False, was_forced=False, kwargs=bool_kwargs)
    observer.draw_integer(0, was_forced=False, kwargs=int_kwargs)
    observer.draw_boolean(False, was_forced=True, kwargs=bool_kwargs)
    observer.conclude_test(Status.INTERESTING, interesting_origin=origin)

    # Children are indented two spaces per level under their parent draw.
    # NOTE(review): relative indentation reconstructed from the printer's
    # two-space indent steps — confirm against the merged upstream test.
    assert (
        pretty.pretty(tree)
        == textwrap.dedent(
            f"""
            boolean True {bool_kwargs}
              Conclusion (Status.INVALID)
            boolean False {bool_kwargs}
              integer 42 {int_kwargs}
                Conclusion (Status.VALID)
              integer 0 {int_kwargs}
                boolean False [forced] {bool_kwargs}
                  Conclusion (Status.INTERESTING, {origin})
            """
        ).strip()
    )
26 changes: 10 additions & 16 deletions hypothesis-python/tests/conjecture/test_minimizer.py
Expand Up @@ -15,34 +15,30 @@


def test_shrink_to_zero():
    # An unconstrained predicate lets the lexical shrinker reach all zeros.
    # (Diff residue fix: kept only the merged call without ``random=``.)
    assert Lexical.shrink(bytes([255] * 8), lambda x: True) == bytes(8)


def test_shrink_to_smallest():
    # The smallest lexical value with sum > 10 puts everything in the last byte.
    # (Diff residue fix: kept only the merged call without ``random=``.)
    assert Lexical.shrink(bytes([255] * 8), lambda x: sum(x) > 10) == bytes(
        [0] * 7 + [11]
    )


def test_float_hack_fails():
    # Requiring the top bit of the first byte pins it to exactly 128.
    # (Diff residue fix: kept only the merged call without ``random=``.)
    assert Lexical.shrink(bytes([255] * 8), lambda x: x[0] >> 7) == bytes(
        [128] + [0] * 7
    )


def test_can_sort_bytes_by_reordering():
start = bytes([5, 4, 3, 2, 1, 0])
finish = Lexical.shrink(start, lambda x: set(x) == set(start), random=Random(0))
finish = Lexical.shrink(start, lambda x: set(x) == set(start))
assert finish == bytes([0, 1, 2, 3, 4, 5])


def test_can_sort_bytes_by_reordering_partially():
start = bytes([5, 4, 3, 2, 1, 0])
finish = Lexical.shrink(
start, lambda x: set(x) == set(start) and x[0] > x[-1], random=Random(0)
)
finish = Lexical.shrink(start, lambda x: set(x) == set(start) and x[0] > x[-1])
assert finish == bytes([1, 2, 3, 4, 5, 0])


Expand All @@ -59,7 +55,5 @@ def test_can_sort_bytes_by_reordering_partially2():

def test_can_sort_bytes_by_reordering_partially_not_cross_stationary_element():
    # Element 2 is pinned at index 3, so sorting can only happen around it.
    # (Diff residue fix: kept only the merged call without ``random=``.)
    start = bytes([5, 3, 0, 2, 1, 4])
    finish = Lexical.shrink(start, lambda x: set(x) == set(start) and x[3] == 2)
    assert finish <= bytes([0, 3, 5, 2, 1, 4])
16 changes: 4 additions & 12 deletions hypothesis-python/tests/conjecture/test_order_shrinking.py
Expand Up @@ -8,8 +8,6 @@
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.

from random import Random

from hypothesis import example, given, strategies as st
from hypothesis.internal.conjecture.shrinking import Ordering

Expand All @@ -23,22 +21,18 @@ def test_shrinks_down_to_sorted_the_slow_way(ls):
# automatically, but here we test that a single run_step could put the
# list in sorted order anyway if it had to, and that that is just an
# optimisation.
shrinker = Ordering(ls, lambda ls: True, random=Random(0), full=False)
shrinker = Ordering(ls, lambda ls: True, full=False)
shrinker.run_step()
assert list(shrinker.current) == sorted(ls)


def test_can_partially_sort_a_list():
    # The x[0] > x[-1] constraint prevents a complete sort; 0 stays last.
    # (Diff residue fix: kept only the merged call without ``random=``.)
    finish = Ordering.shrink([5, 4, 3, 2, 1, 0], lambda x: x[0] > x[-1])
    assert finish == (1, 2, 3, 4, 5, 0)


def test_can_partially_sort_a_list_2():
    # With full=True the shrinker keeps iterating until no pass makes progress.
    # (Diff residue fix: kept only the merged call without ``random=``.)
    finish = Ordering.shrink([5, 4, 3, 2, 1, 0], lambda x: x[0] > x[2], full=True)
    assert finish <= (1, 2, 0, 3, 4, 5)


Expand All @@ -49,9 +43,7 @@ def test_adaptively_shrinks_around_hole():
intended_result = sorted(initial)
intended_result.insert(500, intended_result.pop())

shrinker = Ordering(
initial, lambda ls: ls[500] == 2000, random=Random(0), full=True
)
shrinker = Ordering(initial, lambda ls: ls[500] == 2000, full=True)
shrinker.run()

assert shrinker.current[500] == 2000
Expand Down