-
Notifications
You must be signed in to change notification settings - Fork 15
feat: add handling for X-RateLimit-Reset header in retry middleware…
#604
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
b88c40e
1a3105e
0e1a7e3
c0c078d
e0cfcf1
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -42,7 +42,7 @@ var statusCodesToRetryLUT = map[int]retryLogic{ | |
| } | ||
|
|
||
| var errRetryNecessary = errors.New("retry with backoff") | ||
| var errRetryAfterHeaderError = errors.New("retry-after is too much in the future") | ||
| var errRetryDelayMaxExceeded = errors.New("suggested retry delay exceeds maximum allowed wait") | ||
|
|
||
| type RetryMiddleware struct { | ||
| nextRoundtripper http.RoundTripper | ||
|
|
@@ -109,7 +109,7 @@ func (rm RetryMiddleware) RoundTrip(req *http.Request) (*http.Response, error) { | |
| response.Header.Set(retryCountHeaderKey, fmt.Sprintf("%d", actualAttempts)) | ||
| } | ||
|
|
||
| // errors from the next round tripper cannot not be retried | ||
| // errors from the next round tripper cannot be retried | ||
| if err != nil { | ||
| return response, backoff.Permanent(err) | ||
| } | ||
|
|
@@ -133,13 +133,21 @@ func (rm RetryMiddleware) RoundTrip(req *http.Request) (*http.Response, error) { | |
| backoffMethod.InitialInterval = time.Duration(retryAfterSeconds) * time.Second | ||
| finalResponse, finalError = backoff.Retry(req.Context(), op, backoff.WithBackOff(backoffMethod)) | ||
|
|
||
| // if retries fail to resolve the issue, we need to unset the locally used error type to not return it from the RoundTripper | ||
| if errors.Is(finalError, errRetryNecessary) { | ||
| finalError = rm.filterRetryError(finalError, actualAttempts) | ||
| return finalResponse, finalError | ||
| } | ||
|
|
||
| // filterRetryError strips sentinel errors used only inside the retry loop so callers receive the last HTTP response. | ||
| func (rm RetryMiddleware) filterRetryError(err error, actualAttempts int) error { | ||
| if errors.Is(err, errRetryNecessary) { | ||
| rm.logger.Warn().Msgf("Retry ultimately failed after %d attempts", actualAttempts) | ||
| finalError = nil | ||
| return nil | ||
| } | ||
|
|
||
| return finalResponse, finalError | ||
| if errors.Is(err, errRetryDelayMaxExceeded) { | ||
| rm.logger.Warn().Msg("Suggested retry delay from Retry-After or X-RateLimit-Reset exceeds maximum allowed wait; returning last HTTP response") | ||
| return nil | ||
| } | ||
| return err | ||
| } | ||
|
|
||
| func getMaxRetryAttempts(response *http.Response, maxAttempts int) int { | ||
|
|
@@ -172,12 +180,23 @@ func shouldRetry(response *http.Response, attempts int, maxAttempts int) error { | |
|
|
||
| // try to read retry-after header if available | ||
| if headerRetryAfterValue := response.Header.Get("Retry-After"); len(headerRetryAfterValue) > 0 { | ||
| fixRetryDelay = parseRetryAfterHeader(headerRetryAfterValue) | ||
| fixRetryDelay = parseRetryDelay(headerRetryAfterValue) | ||
|
|
||
| // if the fix retry delay is too big, we rather fail permanently than blocking too long | ||
| if fixRetryDelay > maxRetryAfter { | ||
| return backoff.Permanent(errRetryDelayMaxExceeded) | ||
| } | ||
| } | ||
|
|
||
| // if the fix retry delay is too big, we rather fail permanently than blocking too long | ||
| if fixRetryDelay > maxRetryAfter { | ||
| return backoff.Permanent(errRetryAfterHeaderError) | ||
| if fixRetryDelay == 0 { | ||
| // try to read X-RateLimit-Reset header if available | ||
| // according to envoy docs: number of seconds until reset of the current time-window | ||
| if headerXRateLimitResetValue := response.Header.Get("X-RateLimit-Reset"); len(headerXRateLimitResetValue) > 0 { | ||
| fixRetryDelay = parseRetryDelay(headerXRateLimitResetValue) | ||
| } | ||
| if fixRetryDelay > maxRetryAfter { | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Question: @robertolopezlopez did we verify if the existing
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. we probably need to find out first how long the rate limiting window lasts, I have just asked the platform team
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
So:
In our code: |
||
| return backoff.Permanent(errRetryDelayMaxExceeded) | ||
| } | ||
| } | ||
|
|
||
| // if a retry after is defined, this is the time to wait for | ||
|
|
@@ -192,7 +211,7 @@ func shouldRetry(response *http.Response, attempts int, maxAttempts int) error { | |
| return nil | ||
| } | ||
|
|
||
| func parseRetryAfterHeader(headerRetryAfterValue string) time.Duration { | ||
| func parseRetryDelay(headerRetryAfterValue string) time.Duration { | ||
| // Retry-After: 1230 | ||
| if tmp, err := strconv.ParseInt(headerRetryAfterValue, 10, 64); err == nil { | ||
| return time.Duration(tmp) * time.Second | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
We can change this to processRetryError (or something similar) so that we still return an error carrying information we can surface.
It seems we have some ErrorCatalog we can use
cc: @PeterSchafer