From fc6fefdd345df82324ab1d6d090d34b063998f02 Mon Sep 17 00:00:00 2001
From: trajan0x <83933037+trajan0x@users.noreply.github.com>
Date: Wed, 23 Aug 2023 12:34:06 -0400
Subject: [PATCH] Make core/retry defaults more sane (#1275)

Co-authored-by: Trajan0x
---
 core/retry/retry.go                    | 66 +++++++++++++++----
 core/retry/retry_test.go               | 24 ++++++-
 core/tunnel/suite_test.go              |  2 +-
 .../consumer/parser/tokendata/cache.go |  6 +-
 .../consumer/parser/tokenpool/cache.go |  2 +-
 5 files changed, 79 insertions(+), 21 deletions(-)

diff --git a/core/retry/retry.go b/core/retry/retry.go
index 3e752e070d..2f25007e39 100644
--- a/core/retry/retry.go
+++ b/core/retry/retry.go
@@ -5,6 +5,7 @@ import (
 	"errors"
 	"fmt"
 	"github.com/jpillora/backoff"
+	errorUtil "github.com/pkg/errors"
 	"time"
 )
 
@@ -13,12 +14,27 @@ type RetryableFunc func(ctx context.Context) error
 
 // retryWithBackoffConfig holds the configuration for WithBackoff.
 type retryWithBackoffConfig struct {
-	factor         float64
-	jitter         bool
-	min            time.Duration
-	max            time.Duration
+	factor float64
+	jitter bool
+	min    time.Duration
+	max    time.Duration
+	// maxAttempts sets the maximum number of retry attempts.
+	// if this is negative it is ignored
 	maxAttempts    int
 	maxAttemptTime time.Duration
+	// maxAllAttempts sets the maximum time for all attempts.
+	// if this is negative it is ignored
+	maxAllAttemptsTime time.Duration
+}
+
+// returns true if the number of attempts exceeds the maximum number of attempts.
+func (r *retryWithBackoffConfig) exceedsMaxAttempts(attempts int) bool {
+	return r.maxAttempts > 0 && attempts > r.maxAttempts
+}
+
+// returns true if the time for all attempts exceeds the maximum time for all attempts.
+func (r *retryWithBackoffConfig) exceedsMaxTime(startTime time.Time) bool {
+	return r.maxAllAttemptsTime > 0 && time.Since(startTime) > r.maxAllAttemptsTime
 }
 
 // WithBackoffConfigurator configures a retryWithBackoffConfig.
@@ -59,21 +75,28 @@ func WithMaxAttempts(maxAttempts int) WithBackoffConfigurator {
 	}
 }
 
-// WithMaxAttemptsTime sets the maximum time of all retry attempts.
-func WithMaxAttemptsTime(maxAttemptTime time.Duration) WithBackoffConfigurator {
+// WithMaxAttemptTime sets the maximum time of all retry attempts.
+func WithMaxAttemptTime(maxAttemptTime time.Duration) WithBackoffConfigurator {
 	return func(c *retryWithBackoffConfig) {
 		c.maxAttemptTime = maxAttemptTime
 	}
 }
 
+// WithMaxTotalTime sets the maximum time of all retry attempts combined.
+func WithMaxTotalTime(maxTotalTime time.Duration) WithBackoffConfigurator {
+	return func(c *retryWithBackoffConfig) {
+		c.maxAllAttemptsTime = maxTotalTime
+	}
+}
+
 func defaultConfig() retryWithBackoffConfig {
 	return retryWithBackoffConfig{
-		factor: 2,
-		jitter: true,
-		min:    200 * time.Millisecond,
-		max:    5 * time.Second,
-		// TODO: default to negative, do not enforce a max when negative
-		maxAttempts: 3,
+		factor:             2,
+		jitter:             true,
+		min:                200 * time.Millisecond,
+		max:                5 * time.Second,
+		maxAttempts:        -1,
+		maxAllAttemptsTime: time.Second * 30,
 	}
 }
 
@@ -93,8 +116,10 @@ func WithBackoff(ctx context.Context, doFunc RetryableFunc, configurators ...Wit
 	}
 
 	timeout := time.Duration(0)
+	startTime := time.Now()
+
 	attempts := 0
-	for attempts < config.maxAttempts {
+	for !config.exceedsMaxAttempts(attempts) && !config.exceedsMaxTime(startTime) {
 		select {
 		case <-ctx.Done():
 			return fmt.Errorf("%w while retrying", ctx.Err())
@@ -120,8 +145,21 @@ func WithBackoff(ctx context.Context, doFunc RetryableFunc, configurators ...Wit
 		}
 	}
 
-	return ErrMaxAttempts
+	if config.exceedsMaxAttempts(attempts) {
+		return errorUtil.Wrapf(ErrMaxAttempts, "after %d attempts", attempts)
+	}
+	if config.exceedsMaxTime(startTime) {
+		return errorUtil.Wrapf(ErrMaxTime, "after %s (max was %s)", time.Since(startTime).String(), config.maxAllAttemptsTime.String())
+	}
+
+	return ErrUnknown
 }
 
 // ErrMaxAttempts is returned when the maximum number of retry attempts is reached.
 var ErrMaxAttempts = errors.New("max attempts reached")
+
+// ErrMaxTime is returned when the maximum time for all retry attempts is reached.
+var ErrMaxTime = errors.New("max time reached")
+
+// ErrUnknown is returned when an unknown error occurs.
+var ErrUnknown = errors.New("unknown error")
diff --git a/core/retry/retry_test.go b/core/retry/retry_test.go
index 24a5d21b5d..62f59beb6f 100644
--- a/core/retry/retry_test.go
+++ b/core/retry/retry_test.go
@@ -11,6 +11,7 @@ import (
 	"time"
 )
 
+// nolint: cyclop
 func TestRetryWithBackoff(t *testing.T) {
 	// Test a function that succeeds on the first attempt.
t.Run("Success", func(t *testing.T) { @@ -88,7 +89,7 @@ func TestRetryWithBackoff(t *testing.T) { Equal(t, withMax, newCfg.GetMax()) }) - t.Run("WithMaxAttemptsTime", func(t *testing.T) { + t.Run("WithMaxAttemptTime", func(t *testing.T) { err := retry.WithBackoff(context.Background(), func(ctx context.Context) error { select { case <-ctx.Done(): @@ -96,7 +97,26 @@ func TestRetryWithBackoff(t *testing.T) { case <-time.After(time.Millisecond): return nil } - }, retry.WithMaxAttemptsTime(1)) + }, retry.WithMaxAttemptTime(1), retry.WithMaxAttempts(3)) NotNil(t, err) }) + + t.Run("WithMaxTotalTime", func(t *testing.T) { + startTime := time.Now() + const testDuration = time.Second + + err := retry.WithBackoff(context.Background(), func(ctx context.Context) error { + select { + case <-ctx.Done(): + return fmt.Errorf("context canceled: %w", ctx.Err()) + case <-time.After(time.Millisecond): + return fmt.Errorf("ima fail") + } + }, retry.WithMaxTotalTime(testDuration)) + NotNil(t, err) + + if time.Since(startTime) < time.Second { + t.Errorf("Expected to run for at least %s second, but ran for %s", testDuration.String(), time.Since(startTime)) + } + }) } diff --git a/core/tunnel/suite_test.go b/core/tunnel/suite_test.go index 09a2203cbd..a9c5dced45 100644 --- a/core/tunnel/suite_test.go +++ b/core/tunnel/suite_test.go @@ -76,7 +76,7 @@ func (n *TunnelSuite) startServer(ctx context.Context) { } return nil - }, retry.WithMin(time.Millisecond), retry.WithMax(time.Second), retry.WithMaxAttemptsTime(time.Second*30)) + }, retry.WithMin(time.Millisecond), retry.WithMax(time.Second), retry.WithMaxAttemptTime(time.Second*30)) n.Require().NoError(err) } diff --git a/services/explorer/consumer/parser/tokendata/cache.go b/services/explorer/consumer/parser/tokendata/cache.go index fd49b29ccb..bda59631bf 100644 --- a/services/explorer/consumer/parser/tokendata/cache.go +++ b/services/explorer/consumer/parser/tokendata/cache.go @@ -126,7 +126,7 @@ func (t *tokenDataServiceImpl) retrieveTokenData(parentCtx context.Context, chai res.decimals = tokenData.TokenDecimals return nil - }, retry.WithMaxAttemptsTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt)) + }, retry.WithMaxAttemptTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt)) }) g.Go(func() error { @@ -140,7 +140,7 @@ func (t *tokenDataServiceImpl) retrieveTokenData(parentCtx context.Context, chai res.tokenID = *nullableTokenID return nil - }, retry.WithMaxAttemptsTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt)) + }, retry.WithMaxAttemptTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt)) }) err := g.Wait() @@ -226,7 +226,7 @@ func (t *tokenDataServiceImpl) retrieveCCTPTokenData(parentCtx context.Context, res.decimals = 6 // TODO, as cctp bridging matures, retrieve this data from on chain somehow. 
 
 		return nil
-	}, retry.WithMaxAttemptsTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
+	}, retry.WithMaxAttemptTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
 	if err != nil {
 		return nil, fmt.Errorf("could not get token data: %w", err)
 	}
diff --git a/services/explorer/consumer/parser/tokenpool/cache.go b/services/explorer/consumer/parser/tokenpool/cache.go
index e3976fb4d8..e9306f69f7 100644
--- a/services/explorer/consumer/parser/tokenpool/cache.go
+++ b/services/explorer/consumer/parser/tokenpool/cache.go
@@ -73,7 +73,7 @@ func (t *tokenPoolDataServiceImpl) GetTokenAddress(parentCtx context.Context, ch
 	//nolint: wrapcheck
 	err = retry.WithBackoff(ctx, func(ctx context.Context) error {
 		return t.storeTokenIndex(ctx, chainID, tokenIndex, tokenAddress, contractAddress)
-	}, retry.WithMaxAttemptsTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
+	}, retry.WithMaxAttemptTime(maxAttemptTime), retry.WithMaxAttempts(maxAttempt))
 	if err != nil {
 		return nil, fmt.Errorf("could not store token index: %w", err)
 	}
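
A minimal usage sketch of the updated API follows; it is illustrative only and not part of the patch. The configurators and sentinel errors (WithMaxTotalTime, WithMaxAttempts, ErrMaxAttempts, ErrMaxTime) come from the diff above, while the module path in the import and the flaky operation are assumptions made for the example. With the new defaults, WithBackoff keeps retrying until the 30-second total-time budget is spent unless an attempt cap is set explicitly.

package main

import (
	"context"
	"errors"
	"fmt"
	"time"

	// NOTE: module path assumed for illustration; substitute the real import path of core/retry.
	"github.com/synapsecns/sanguine/core/retry"
)

func main() {
	attempt := 0

	err := retry.WithBackoff(context.Background(), func(ctx context.Context) error {
		attempt++
		// Simulated flaky operation that succeeds on the third try.
		if attempt < 3 {
			return fmt.Errorf("transient failure on attempt %d", attempt)
		}
		return nil
	},
		retry.WithMaxTotalTime(30*time.Second), // cap the whole retry loop (matches the new default)
		retry.WithMaxAttempts(10),              // opt back into an attempt cap; the default is now -1 (unlimited)
	)

	switch {
	case errors.Is(err, retry.ErrMaxAttempts):
		fmt.Println("gave up, attempt budget exhausted:", err)
	case errors.Is(err, retry.ErrMaxTime):
		fmt.Println("gave up, time budget exhausted:", err)
	case err != nil:
		fmt.Println("retry failed:", err)
	default:
		fmt.Printf("succeeded after %d attempts\n", attempt)
	}
}

Because the sentinel errors are wrapped with pkg/errors (which supports Unwrap in recent versions), errors.Is should still match ErrMaxAttempts and ErrMaxTime while the wrapped message reports how many attempts ran or how long the loop took.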