Skip to content

Commit 767c63e

Browse files
authored
Merge pull request #1751 from HypothesisWorks/DRMacIver/each-pair-ordering
Iterate over block pairs in a different order.
2 parents c44414c + 3777423 commit 767c63e

File tree

3 files changed

+61
-40
lines changed

3 files changed

+61
-40
lines changed

hypothesis-python/RELEASE.rst

+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
RELEASE_TYPE: patch
2+
3+
This changes the order that the shrinker tries certain operations in its "emergency" phase which runs late in the process.
4+
The new order should be better at avoiding long stalls where the shrinker is failing to make progress,
5+
which may be helpful if you have difficult-to-shrink test cases.
6+
However, this will not be noticeable in the vast majority of use cases.

hypothesis-python/src/hypothesis/internal/conjecture/shrinker.py

+22-16
Original file line numberDiff line numberDiff line change
@@ -570,24 +570,30 @@ def each_pair_of_blocks(self, accept_first, accept_second):
570570
"""Yield each pair of blocks ``(a, b)``, such that ``a.index <
571571
b.index``, but only if ``accept_first(a)`` and ``accept_second(b)`` are
572572
both true."""
573-
i = 0
574-
while i < len(self.blocks):
575-
j = i + 1
576-
while j < len(self.blocks):
573+
574+
# Iteration order here is significant: Rather than fixing i and looping
575+
# over each j, then doing the same, etc. we iterate over the gap between
576+
# i and j and then over i. The reason for this is that it ensures that
577+
# we try a different value for i and j on each iteration of the inner
578+
# loop. This stops us from stalling if we happen to hit on a value of i
579+
# where nothing useful can be done.
580+
#
581+
# In the event that nothing works, this doesn't help and we still make
582+
# the same number of calls, but by ensuring that we make progress we
583+
# have more opportunities to make shrinks that speed up the tests or
584+
# that reduce the number of viable shrinks at the next gap size because
585+
# we've lowered some values.
586+
offset = 1
587+
while offset < len(self.blocks):
588+
i = 0
589+
while i + offset < len(self.blocks):
590+
j = i + offset
577591
block_i = self.blocks[i]
578-
if not accept_first(block_i):
579-
break
580592
block_j = self.blocks[j]
581-
if not accept_second(block_j):
582-
j += 1
583-
continue
584-
585-
yield (block_i, block_j)
586-
# After this point, the shrink target could have changed,
587-
# so blocks need to be re-checked.
588-
589-
j += 1
590-
i += 1
593+
if accept_first(block_i) and accept_second(block_j):
594+
yield (block_i, block_j)
595+
i += 1
596+
offset += 1
591597

592598
def pass_to_descendant(self):
593599
"""Attempt to replace each example with a descendant example.

hypothesis-python/tests/nocover/test_conjecture_engine.py

+33-24
Original file line numberDiff line numberDiff line change
@@ -214,12 +214,12 @@ def shrinker(data):
214214
data.draw_bits(1)
215215
data.mark_interesting()
216216

217-
bounds = [
217+
bounds = {
218218
(a.bounds, b.bounds)
219219
for a, b in shrinker.each_pair_of_blocks(lambda block: True, lambda block: True)
220-
]
220+
}
221221

222-
assert bounds == [((0, 1), (1, 2)), ((0, 1), (2, 3)), ((1, 2), (2, 3))]
222+
assert bounds == {((0, 1), (1, 2)), ((0, 1), (2, 3)), ((1, 2), (2, 3))}
223223

224224

225225
def test_each_pair_of_blocks_with_filters():
@@ -231,17 +231,18 @@ def shrinker(data):
231231
data.draw_bits(1)
232232
data.mark_interesting()
233233

234-
blocks = [
234+
blocks = {
235235
(a.index, b.index)
236236
for a, b in shrinker.each_pair_of_blocks(
237237
lambda block: block.index != 1, lambda block: block.index != 3
238238
)
239-
]
239+
}
240240

241-
assert blocks == [(0, 1), (0, 2), (0, 4), (2, 4), (3, 4)]
241+
assert blocks == {(0, 1), (0, 2), (0, 4), (2, 4), (3, 4)}
242242

243243

244-
def test_each_pair_of_blocks_handles_change():
244+
@pytest.mark.parametrize("intervene_at", [(0, 1), (0, 6), (1, 3)])
245+
def test_each_pair_of_blocks_handles_change(intervene_at):
245246
initial = hbytes([9] + [0] * 10)
246247

247248
@shrinking_from(initial)
@@ -251,20 +252,28 @@ def shrinker(data):
251252
data.draw_bits(1)
252253
data.mark_interesting()
253254

254-
blocks = []
255-
for a, b in shrinker.each_pair_of_blocks(lambda block: True, lambda block: True):
256-
if a.index == 0 and b.index == 6:
257-
shrinker.incorporate_new_buffer(hbytes([3] + [0] * 10))
258-
blocks.append((a.index, b.index))
259-
260-
assert blocks == [
261-
(0, 1),
262-
(0, 2),
263-
(0, 3),
264-
(0, 4),
265-
(0, 5),
266-
(0, 6),
267-
(1, 2),
268-
(1, 3),
269-
(2, 3),
270-
]
255+
def blocks(intervene=False):
256+
blocks = []
257+
for a, b in shrinker.each_pair_of_blocks(
258+
lambda block: True, lambda block: True
259+
):
260+
assert a.index < b.index < len(shrinker.shrink_target.blocks)
261+
if intervene and (a.index, b.index) == intervene_at:
262+
shrinker.incorporate_new_buffer(hbytes([3] + [0] * 10))
263+
blocks.append((a.index, b.index))
264+
return blocks
265+
266+
original_blocks = blocks()
267+
blocks_with_intervention = blocks(intervene=True)
268+
blocks_after_intervention = blocks()
269+
270+
# We should not abort when the change happens but should carry on to include
271+
# every pair of indices that we would have if we'd iterated after the change.
272+
assert set(blocks_after_intervention).issubset(blocks_with_intervention)
273+
274+
# Changing the iteration order shouldn't introduce new possible block pairs.
275+
assert set(blocks_with_intervention).issubset(original_blocks)
276+
277+
# We should however not do that by repeating any pairs of block indexes -
278+
# repetition should happen the next time the relevant passes are run.
279+
assert len(set(blocks_with_intervention)) == len(blocks_with_intervention)

0 commit comments

Comments
 (0)