fix: random ticker stops working when no receiver (#1653)

Authored by abelanger5 on 2025-04-30 15:44:22 -04:00, committed by GitHub
parent f474c34a19
commit fa7ab2bd75
2 changed files with 49 additions and 3 deletions


@@ -67,13 +67,15 @@ func (rt *RandomTicker) loop() {
 			close(c)
 			return
 		case <-t.C:
+			t.Stop()
+
+			// non-blocking write
 			select {
 			case rt.C <- time.Now():
-				t.Stop()
-				t = time.NewTimer(rt.nextInterval())
 			default:
 				// there could be noone receiving...
 			}
+			t = time.NewTimer(rt.nextInterval())
 		}
 	}
 }
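For context, the sketch below shows a minimal, self-contained random ticker with the fixed loop, so the change can be read outside the diff. Only rt.C, rt.Stop, rt.nextInterval, and NewRandomTicker(min, max) are visible in this commit; the internal field names (stopc, min, max) and the constructor body are assumptions for illustration, not necessarily what the actual file contains.

package randomticker

import (
	"math/rand"
	"time"
)

// RandomTicker delivers the current time on C at random intervals between
// min and max. Internal field names here are illustrative assumptions.
type RandomTicker struct {
	C     chan time.Time
	stopc chan chan struct{} // carries an ack channel for Stop
	min   time.Duration
	max   time.Duration
}

// NewRandomTicker starts the ticker goroutine. Assumes max > min.
func NewRandomTicker(min, max time.Duration) *RandomTicker {
	rt := &RandomTicker{
		C:     make(chan time.Time),
		stopc: make(chan chan struct{}),
		min:   min,
		max:   max,
	}
	go rt.loop()
	return rt
}

// Stop signals the loop to exit, waits for the ack, then closes C.
func (rt *RandomTicker) Stop() {
	c := make(chan struct{})
	rt.stopc <- c
	<-c
	close(rt.C)
}

// nextInterval picks a random duration in [min, max).
func (rt *RandomTicker) nextInterval() time.Duration {
	return rt.min + time.Duration(rand.Int63n(int64(rt.max-rt.min)))
}

func (rt *RandomTicker) loop() {
	t := time.NewTimer(rt.nextInterval())
	for {
		select {
		case c := <-rt.stopc:
			t.Stop()
			close(c)
			return
		case <-t.C:
			t.Stop()

			// non-blocking write: if nobody is receiving, drop the tick
			select {
			case rt.C <- time.Now():
			default:
				// there could be no one receiving...
			}

			// re-arm the timer on every path; before the fix it was only
			// reset after a successful send, so an absent receiver left the
			// ticker stuck with an expired timer
			t = time.NewTimer(rt.nextInterval())
		}
	}
}

The key point of the fix is that re-arming the timer no longer depends on a successful send: a slow or absent consumer now only loses individual ticks instead of silently stopping the ticker.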


@@ -59,3 +59,47 @@ func TestRandomTicker(t *testing.T) {
 		t.Fatal("expected to receive close channel signal")
 	}
 }
+
+// TestRandomTickerUnblockingIssue is a regression test for a bug in the original implementation
+// where the ticker would stop generating new events if no one was reading from the channel.
+func TestRandomTickerUnblockingIssue(t *testing.T) {
+	minDuration := 50 * time.Millisecond
+	maxDuration := 100 * time.Millisecond
+
+	// Create the random ticker
+	rt := randomticker.NewRandomTicker(minDuration, maxDuration)
+	defer rt.Stop()
+
+	// Get the first tick to make sure it's working
+	select {
+	case <-rt.C:
+		// Good, we got a tick
+	case <-time.After(maxDuration * 2):
+		t.Fatal("didn't receive initial tick in the expected timeframe")
+	}
+
+	// Now simulate a scenario where the consumer isn't reading from the channel
+	// by just waiting without reading from rt.C
+	time.Sleep(maxDuration * 2)
+
+	// After ignoring the channel for a while, now try to read from it again
+	// With the bug, this would hang because no new ticks are generated
+	// With the fix, we should get a new tick within 2*maxDuration
+
+	tickCount := 0
+	timeout := time.After(maxDuration * 5) // Give it plenty of time to tick
+
+	for tickCount < 3 { // Try to get 3 more ticks
+		select {
+		case <-rt.C:
+			tickCount++
+		case <-timeout:
+			// With the original implementation, we'll hit this timeout
+			t.Fatalf("only received %d ticks after ignoring the channel; ticker appears stuck", tickCount)
+			return
+		}
+	}
+
+	// If we get here, the ticker continued to generate events even when
+	// we weren't reading from the channel, which means the fix is working
+}
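To exercise just this regression test, something along the lines of the following should work from the package directory (the actual package path is not shown in this view, so adjust accordingly):

go test -run TestRandomTickerUnblockingIssue -v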