Merge pull request #417 from microsoft/fix/retry-handler
Fixes Retry Handler delay validation, retry count validation, and the exponential backoff calculation
Ndiritu authored Dec 2, 2024
2 parents d6080ea + c655fa2 commit 12a11f6
Showing 2 changed files with 43 additions and 19 deletions.
36 changes: 18 additions & 18 deletions packages/http/httpx/kiota_http/middleware/retry_handler.py
@@ -14,6 +14,8 @@
 from .middleware import BaseMiddleware
 from .options import RetryHandlerOption
 
+RETRY_ATTEMPT = "Retry-Attempt"
+
 
 class RetryHandler(BaseMiddleware):
     """
@@ -71,43 +73,41 @@ async def send(request: httpx.Request, transport: httpx.AsyncBaseTransport
         Sends the http request object to the next middleware or retries the request if necessary.
         """
         response = None
-        retry_count = 0
 
         _span = self._create_observability_span(request, "RetryHandler_send")
         current_options = self._get_current_options(request)
         _span.set_attribute("com.microsoft.kiota.handler.retry.enable", True)
         _span.end()
         retry_valid = current_options.should_retry
-        max_delay = current_options.max_delay
-        _retry_span = self._create_observability_span(
-            request, f"RetryHandler_send - attempt {retry_count}"
-        )
 
         while retry_valid:
-            start_time = time.time()
             response = await super().send(request, transport)
-            _retry_span.set_attribute(HTTP_RESPONSE_STATUS_CODE, response.status_code)
             # check that max retries has not been hit
+            retry_count = 0 if RETRY_ATTEMPT not in response.request.headers else int(
+                response.request.headers[RETRY_ATTEMPT]
+            )
+            _retry_span = self._create_observability_span(
+                request, f"RetryHandler_send - attempt {retry_count}"
+            )
             retry_valid = self.check_retry_valid(retry_count, current_options)
 
             # Get the delay time between retries
-            delay = self.get_delay_time(retry_count, response)
+            delay = self.get_delay_time(retry_count, response, current_options.max_delay)
 
             # Check if the request needs to be retried based on the response method
             # and status code
             should_retry = self.should_retry(request, current_options, response)
-            if all([should_retry, retry_valid, delay < max_delay]):
+            if all([should_retry, retry_valid, delay < RetryHandlerOption.MAX_DELAY]):
                 time.sleep(delay)
-                end_time = time.time()
-                max_delay -= (end_time - start_time)
                 # increment the count for retries
                 retry_count += 1
-                request.headers.update({'retry-attempt': f'{retry_count}'})
+                request.headers.update({RETRY_ATTEMPT: f'{retry_count}'})
+                _retry_span.set_attribute(HTTP_RESPONSE_STATUS_CODE, response.status_code)
                 _retry_span.set_attribute('http.request.resend_count', retry_count)
                 continue
             _retry_span.end()
             break
         if response is None:
             response = await super().send(request, transport)
             _retry_span.end()
         return response
 
     def _get_current_options(self, request: httpx.Request) -> RetryHandlerOption:
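The net effect of the loop rework: the attempt counter now lives in the request's Retry-Attempt header rather than in a local variable, so it survives re-entry into the handler, and the computed delay is checked against the class-level RetryHandlerOption.MAX_DELAY cap instead of a mutable max_delay budget that was decremented by wall-clock time. A minimal sketch of the per-iteration decision, assuming a 180-second cap (the helper name and dict-based headers are illustrative, not the library's API):

# Sketch: how the reworked loop decides whether to retry (hypothetical helper).
RETRY_ATTEMPT = "Retry-Attempt"
MAX_DELAY = 180  # assumed value of RetryHandlerOption.MAX_DELAY, per the test comments

def next_attempt(headers: dict, max_retries: int, delay: float, should_retry: bool):
    """Return the next attempt number if another retry is allowed, else None."""
    retry_count = int(headers.get(RETRY_ATTEMPT, 0))  # the header drives the count
    retry_valid = retry_count < max_retries
    if all([should_retry, retry_valid, delay < MAX_DELAY]):
        return retry_count + 1  # the caller writes this back into the header
    return None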
@@ -169,7 +169,7 @@ def check_retry_valid(self, retry_count, options):
             return True
         return False
 
-    def get_delay_time(self, retry_count, response=None):
+    def get_delay_time(self, retry_count, response=None, delay=RetryHandlerOption.DEFAULT_DELAY):
         """
         Get the time in seconds to delay between retry attempts.
         Respects a retry-after header in the response if provided
@@ -178,15 +178,15 @@ def get_delay_time(self, retry_count, response=None):
         retry_after = self._get_retry_after(response)
         if retry_after:
             return retry_after
-        return self._get_delay_time_exp_backoff(retry_count)
+        return self._get_delay_time_exp_backoff(retry_count, delay)
 
-    def _get_delay_time_exp_backoff(self, retry_count):
+    def _get_delay_time_exp_backoff(self, retry_count, delay):
         """
         Get time in seconds to delay between retry attempts based on an exponential
         backoff value.
         """
         exp_backoff_value = self.backoff_factor * +(2**(retry_count - 1))
-        backoff_value = exp_backoff_value + (random.randint(0, 1000) / 1000)
+        backoff_value = exp_backoff_value + (random.randint(0, 1000) / 1000) + delay
 
         backoff = min(self.backoff_max, backoff_value)
         return backoff
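With the delay parameter folded in, each wait is roughly delay + backoff_factor * 2^(retry_count - 1) plus up to a second of jitter, capped at backoff_max. A standalone sketch of the resulting schedule (the factor and cap values here are illustrative, not the library defaults):

import random

def backoff(retry_count: int, delay: float, backoff_factor: float = 0.5, backoff_max: float = 120) -> float:
    """Seconds to wait before the given attempt, mirroring the fixed formula."""
    exp_backoff_value = backoff_factor * (2 ** (retry_count - 1))
    jitter = random.randint(0, 1000) / 1000  # up to 1s of randomness
    return min(backoff_max, exp_backoff_value + jitter + delay)

for attempt in range(1, 5):
    print(f"attempt {attempt}: ~{backoff(attempt, delay=3.0):.2f}s")
# grows ~3.5s, 4s, 5s, 7s (plus jitter) and would flatten at backoff_max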
26 changes: 25 additions & 1 deletion packages/http/httpx/tests/middleware_tests/test_retry_handler.py
@@ -224,9 +224,10 @@ def request_handler(request: httpx.Request):
             return httpx.Response(200, )
         return httpx.Response(
             TOO_MANY_REQUESTS,
-            headers={RETRY_AFTER: "20"},
+            headers={RETRY_AFTER: "200"}, # value exceeds max delay of 180 secs
         )
 
+    # Retry-after value takes precedence over the RetryHandlerOption value specified here
     handler = RetryHandler(RetryHandlerOption(10, 1, True))
     request = httpx.Request(
         'GET',
@@ -237,6 +238,29 @@ def request_handler(request: httpx.Request):
     assert resp.status_code == 429
     assert RETRY_ATTEMPT not in resp.request.headers
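In the test above, the Retry-After value (200 s) is itself used as the delay, and since it is not below the 180-second MAX_DELAY cap referenced in the comment, the all([...]) guard in send() fails: the 429 comes back un-retried and no Retry-Attempt header is ever written. In miniature:

# Hypothetical values mirroring the test: the guard fails on the delay term.
should_retry, retry_valid, delay = True, True, 200.0  # delay comes from Retry-After
assert not all([should_retry, retry_valid, delay < 180])  # no retry is attempted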

+@pytest.mark.asyncio
+async def test_max_retries_respected():
+    """Test that a request is not retried more than max_retries configured"""
+
+    def request_handler(request: httpx.Request):
+        if RETRY_ATTEMPT in request.headers:
+            return httpx.Response(200, )
+        return httpx.Response(
+            TOO_MANY_REQUESTS,
+        )
+
+    # The Retry-Attempt header value already exceeds the max retries configured here
+    handler = RetryHandler(RetryHandlerOption(10, 3, True))
+    request = httpx.Request(
+        'GET',
+        BASE_URL,
+        headers={RETRY_ATTEMPT: '5'} # value exceeds max retries configured
+    )
+    mock_transport = httpx.MockTransport(request_handler)
+    resp = await handler.send(request, mock_transport)
+    assert resp.status_code == 200
+    assert RETRY_ATTEMPT in resp.request.headers
+    assert resp.request.headers[RETRY_ATTEMPT] == '5'

 @pytest.mark.asyncio
 async def test_retry_options_apply_per_request():
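For reference, a condensed usage sketch in the same style as the tests above; the import paths are assumed from the file layout in this diff, and the positional arguments to RetryHandlerOption are read off the existing tests as (delay, max_retries, should_retry):

import httpx
import pytest

from kiota_http.middleware import RetryHandler  # assumed import path
from kiota_http.middleware.options import RetryHandlerOption  # assumed import path

@pytest.mark.asyncio
async def test_retries_once_then_succeeds():
    """429 on the first call; 200 once a Retry-Attempt header shows up."""
    def request_handler(request: httpx.Request):
        if "Retry-Attempt" in request.headers:
            return httpx.Response(200)
        return httpx.Response(429)

    handler = RetryHandler(RetryHandlerOption(1, 3, True))  # assumed: delay, max_retries, should_retry
    request = httpx.Request('GET', "https://example.org")
    resp = await handler.send(request, httpx.MockTransport(request_handler))
    assert resp.status_code == 200
    assert resp.request.headers["Retry-Attempt"] == '1'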
