Make core/retry defaults more sane (#1275)
Co-authored-by: Trajan0x <trajan0x@users.noreply.github.com>
trajan0x and trajan0x authored Aug 23, 2023
1 parent f3d0a58 commit fc6fefd
Showing 5 changed files with 79 additions and 21 deletions.
66 changes: 52 additions & 14 deletions core/retry/retry.go
@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"github.com/jpillora/backoff"
errorUtil "github.com/pkg/errors"
"time"
)

@@ -13,12 +14,27 @@ type RetryableFunc func(ctx context.Context) error

// retryWithBackoffConfig holds the configuration for WithBackoff.
type retryWithBackoffConfig struct {
factor float64
jitter bool
min time.Duration
max time.Duration
factor float64
jitter bool
min time.Duration
max time.Duration
// maxAttempts sets the maximum number of retry attempts.
// if this is negative, it is ignored.
maxAttempts int
// maxAttemptTime sets the maximum time allowed for a single attempt.
maxAttemptTime time.Duration
// maxAllAttemptsTime sets the maximum time for all attempts combined.
// if this is negative, it is ignored.
maxAllAttemptsTime time.Duration
}

// exceedsMaxAttempts returns true if the number of attempts exceeds the configured maximum.
func (r *retryWithBackoffConfig) exceedsMaxAttempts(attempts int) bool {
return r.maxAttempts > 0 && attempts > r.maxAttempts
}

// exceedsMaxTime returns true if the time elapsed since startTime exceeds the maximum time allowed for all attempts.
func (r *retryWithBackoffConfig) exceedsMaxTime(startTime time.Time) bool {
return r.maxAllAttemptsTime > 0 && time.Since(startTime) > r.maxAllAttemptsTime
}

// WithBackoffConfigurator configures a retryWithBackoffConfig.
@@ -59,21 +75,28 @@ func WithMaxAttempts(maxAttempts int) WithBackoffConfigurator {
}
}

// WithMaxAttemptsTime sets the maximum time of all retry attempts.
func WithMaxAttemptsTime(maxAttemptTime time.Duration) WithBackoffConfigurator {
// WithMaxAttemptTime sets the maximum time of a single retry attempt.
func WithMaxAttemptTime(maxAttemptTime time.Duration) WithBackoffConfigurator {
return func(c *retryWithBackoffConfig) {
c.maxAttemptTime = maxAttemptTime
}
}

// WithMaxTotalTime sets the maximum time of all retry attempts combined.
func WithMaxTotalTime(maxTotalTime time.Duration) WithBackoffConfigurator {
return func(c *retryWithBackoffConfig) {
c.maxAllAttemptsTime = maxTotalTime
}
}

func defaultConfig() retryWithBackoffConfig {
return retryWithBackoffConfig{
factor: 2,
jitter: true,
min: 200 * time.Millisecond,
max: 5 * time.Second,
// TODO: default to negative, do not enforce a max when negative
maxAttempts: 3,
factor: 2,
jitter: true,
min: 200 * time.Millisecond,
max: 5 * time.Second,
maxAttempts: -1,
maxAllAttemptsTime: time.Second * 30,
}
}

@@ -93,8 +116,10 @@ func WithBackoff(ctx context.Context, doFunc RetryableFunc, configurators ...Wit
}

timeout := time.Duration(0)
startTime := time.Now()

attempts := 0
for attempts < config.maxAttempts {
for !config.exceedsMaxAttempts(attempts) && !config.exceedsMaxTime(startTime) {
select {
case <-ctx.Done():
return fmt.Errorf("%w while retrying", ctx.Err())
@@ -120,8 +145,21 @@ func WithBackoff(ctx context.Context, doFunc RetryableFunc, configurators ...Wit
}
}

return ErrMaxAttempts
if config.exceedsMaxAttempts(attempts) {
return errorUtil.Wrapf(ErrMaxAttempts, "after %d attempts", attempts)
}
if config.exceedsMaxTime(startTime) {
return errorUtil.Wrapf(ErrMaxTime, "after %s (max was %s)", time.Since(startTime).String(), config.maxAllAttemptsTime.String())
}

return ErrUnknown
}

// ErrMaxAttempts is returned when the maximum number of retry attempts is reached.
var ErrMaxAttempts = errors.New("max attempts reached")

// ErrMaxTime is returned when the maximum time for all retry attempts is reached.
var ErrMaxTime = errors.New("max time reached")

// ErrUnknown is returned when an unknown error occurs.
var ErrUnknown = errors.New("unknown error")
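
For context, here is a minimal usage sketch of the reworked options; it is not part of the diff. The import path github.com/synapsecns/sanguine/core/retry and the failing operation are assumptions, and the errors.Is checks rely on github.com/pkg/errors being at v0.9 or newer so the Wrapf chain can be unwrapped.

package main

import (
	"context"
	"errors"
	"fmt"
	"time"

	"github.com/synapsecns/sanguine/core/retry" // import path assumed from the repository layout
)

func main() {
	err := retry.WithBackoff(context.Background(), func(ctx context.Context) error {
		// hypothetical flaky operation standing in for a real call
		return errors.New("transient failure")
	},
		retry.WithMaxAttempts(5),               // cap the number of attempts
		retry.WithMaxTotalTime(10*time.Second), // cap the total time across all attempts
		retry.WithMin(100*time.Millisecond),    // first backoff interval
		retry.WithMax(2*time.Second),           // upper bound on a single backoff interval
	)

	switch {
	case errors.Is(err, retry.ErrMaxAttempts):
		fmt.Println("attempt budget exhausted:", err)
	case errors.Is(err, retry.ErrMaxTime):
		fmt.Println("time budget exhausted:", err)
	case err != nil:
		fmt.Println("retry failed:", err)
	}
}

With the new defaults (maxAttempts of -1 and a 30-second total budget), callers that pass no configurators are bounded by elapsed time rather than by a fixed three attempts.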
24 changes: 22 additions & 2 deletions core/retry/retry_test.go
@@ -11,6 +11,7 @@ import (
"time"
)

// nolint: cyclop
func TestRetryWithBackoff(t *testing.T) {
// Test a function that succeeds on the first attempt.
t.Run("Success", func(t *testing.T) {
@@ -88,15 +89,34 @@ func TestRetryWithBackoff(t *testing.T) {
Equal(t, withMax, newCfg.GetMax())
})

t.Run("WithMaxAttemptsTime", func(t *testing.T) {
t.Run("WithMaxAttemptTime", func(t *testing.T) {
err := retry.WithBackoff(context.Background(), func(ctx context.Context) error {
select {
case <-ctx.Done():
return fmt.Errorf("context canceled: %w", ctx.Err())
case <-time.After(time.Millisecond):
return nil
}
}, retry.WithMaxAttemptsTime(1))
}, retry.WithMaxAttemptTime(1), retry.WithMaxAttempts(3))
NotNil(t, err)
})

t.Run("WithMaxTotalTime", func(t *testing.T) {
startTime := time.Now()
const testDuration = time.Second

err := retry.WithBackoff(context.Background(), func(ctx context.Context) error {
select {
case <-ctx.Done():
return fmt.Errorf("context canceled: %w", ctx.Err())
case <-time.After(time.Millisecond):
return fmt.Errorf("ima fail")
}
}, retry.WithMaxTotalTime(testDuration))
NotNil(t, err)

if time.Since(startTime) < testDuration {
t.Errorf("Expected to run for at least %s, but ran for %s", testDuration.String(), time.Since(startTime))
}
})
}
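
A companion test for the new sentinel errors could look like the sketch below; it is not part of the commit. It assumes the standard library errors package is imported in this file and, as above, that github.com/pkg/errors is recent enough for errors.Is to see through Wrapf; the subtest name and the always-failing function are hypothetical.

t.Run("MaxAttemptsSentinel", func(t *testing.T) {
	err := retry.WithBackoff(context.Background(), func(ctx context.Context) error {
		return fmt.Errorf("always failing") // hypothetical operation that never succeeds
	}, retry.WithMaxAttempts(2), retry.WithMin(time.Millisecond), retry.WithMax(time.Millisecond))

	NotNil(t, err)
	if !errors.Is(err, retry.ErrMaxAttempts) {
		t.Errorf("expected error to wrap retry.ErrMaxAttempts, got %v", err)
	}
})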
2 changes: 1 addition & 1 deletion core/tunnel/suite_test.go
@@ -76,7 +76,7 @@ func (n *TunnelSuite) startServer(ctx context.Context) {
}

return nil
}, retry.WithMin(time.Millisecond), retry.WithMax(time.Second), retry.WithMaxAttemptsTime(time.Second*30))
}, retry.WithMin(time.Millisecond), retry.WithMax(time.Second), retry.WithMaxAttemptTime(time.Second*30))

n.Require().NoError(err)
}
6 changes: 3 additions & 3 deletions services/explorer/consumer/parser/tokendata/cache.go
@@ -126,7 +126,7 @@ func (t *tokenDataServiceImpl) retrieveTokenData(parentCtx context.Context, chai
res.decimals = tokenData.TokenDecimals

return nil
}, retry.WithMaxAttemptsTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
}, retry.WithMaxAttemptTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
})

g.Go(func() error {
@@ -140,7 +140,7 @@ func (t *tokenDataServiceImpl) retrieveTokenData(parentCtx context.Context, chai
res.tokenID = *nullableTokenID

return nil
}, retry.WithMaxAttemptsTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
}, retry.WithMaxAttemptTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
})

err := g.Wait()
@@ -226,7 +226,7 @@ func (t *tokenDataServiceImpl) retrieveCCTPTokenData(parentCtx context.Context,
res.decimals = 6 // TODO, as cctp bridging matures, retrieve this data from on chain somehow.

return nil
}, retry.WithMaxAttemptsTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
}, retry.WithMaxAttemptTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
if err != nil {
return nil, fmt.Errorf("could not get token data: %w", err)
}
2 changes: 1 addition & 1 deletion services/explorer/consumer/parser/tokenpool/cache.go
@@ -73,7 +73,7 @@ func (t *tokenPoolDataServiceImpl) GetTokenAddress(parentCtx context.Context, ch
//nolint: wrapcheck
err = retry.WithBackoff(ctx, func(ctx context.Context) error {
return t.storeTokenIndex(ctx, chainID, tokenIndex, tokenAddress, contractAddress)
}, retry.WithMaxAttemptsTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
}, retry.WithMaxAttemptTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
if err != nil {
return nil, fmt.Errorf("could not store token index: %w", err)
}
