Commit

Merge pull request #435 from ChrisCummins/download-failure-retry
[util] Increase persistence on "Too many Requests" errors.
ChrisCummins authored Sep 29, 2021
2 parents abb8f86 + 0d9e1d3 commit d5ba02c
Showing 2 changed files with 4 additions and 3 deletions.
4 changes: 2 additions & 2 deletions compiler_gym/util/download.py
@@ -78,7 +78,7 @@ def _download(urls: List[str], sha256: Optional[str], max_retries: int) -> bytes
 
     # A retry loop, and loop over all urls provided.
     last_exception = None
-    wait_time = 5
+    wait_time = 10
     for _ in range(max(max_retries, 1)):
         for url in urls:
             try:
@@ -99,7 +99,7 @@ def _download(urls: List[str], sha256: Optional[str], max_retries: int) -> bytes
 
 
 def download(
-    urls: Union[str, List[str]], sha256: Optional[str] = None, max_retries: int = 3
+    urls: Union[str, List[str]], sha256: Optional[str] = None, max_retries: int = 5
 ) -> bytes:
     """Download a file and return its contents.
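
For context, the constants tuned here sit inside a geometric backoff loop. The sketch below is a self-contained approximation of that pattern, assuming the 10-second initial wait, 1.5x multiplier, and 5-retry default visible in this diff and in the updated test; TooManyRequests and the body of _do_download_attempt are illustrative stand-ins, not the real compiler_gym implementation.

    # A minimal sketch of the retry-with-backoff pattern that this commit tunes.
    # The constants (10 s initial wait, 1.5x multiplier, 5 retries) come from the
    # diff; TooManyRequests and _do_download_attempt's body are stand-ins.
    import time
    from typing import List, Optional


    class TooManyRequests(Exception):
        """Stand-in for an HTTP 429 "Too many Requests" error."""


    def _do_download_attempt(url: str, sha256: Optional[str]) -> bytes:
        """Stand-in for the single-attempt download helper patched in the test."""
        raise TooManyRequests(f"429 from {url}")


    def download_with_retries(
        urls: List[str], sha256: Optional[str] = None, max_retries: int = 5
    ) -> bytes:
        last_exception = None
        wait_time = 10  # Initial wait in seconds, raised from 5 by this commit.
        for _ in range(max(max_retries, 1)):
            for url in urls:
                try:
                    return _do_download_attempt(url, sha256)
                except TooManyRequests as e:
                    last_exception = e
            # Back off geometrically: the n-th sleep lasts 10 * 1.5 ** (n - 1)
            # seconds, which matches the final value asserted by the updated test.
            time.sleep(wait_time)
            wait_time *= 1.5
        raise last_exception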
3 changes: 2 additions & 1 deletion tests/util/download_test.py
@@ -26,7 +26,8 @@ def patched_download(*args):
 
     assert download._do_download_attempt.call_count == max_retries
     assert download.sleep.call_count == max_retries
-    download.sleep.assert_called_with(5 * 1.5 ** (max_retries - 1))
+    starting_wait_time = 10  # The initial wait time in seconds.
+    download.sleep.assert_called_with(starting_wait_time * 1.5 ** (max_retries - 1))
 
 
 @pytest.mark.parametrize("max_retries", [1, 2, 3, 5, 10])
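
The test's formula also makes it easy to see how much more persistent the new defaults are. The snippet below is simple arithmetic on the constants visible in this commit (old: 5 s start and 3 retries; new: 10 s start and 5 retries), not code from the repository:

    # Per-retry sleep schedule implied by the test: the n-th sleep lasts
    # starting_wait_time * 1.5 ** (n - 1) seconds.
    def sleep_schedule(starting_wait_time: float, max_retries: int) -> list:
        return [starting_wait_time * 1.5 ** n for n in range(max_retries)]

    old = sleep_schedule(5, 3)   # [5.0, 7.5, 11.25]                 -> 23.75 s total
    new = sleep_schedule(10, 5)  # [10.0, 15.0, 22.5, 33.75, 50.625] -> 131.875 s total
    print(sum(old), sum(new))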
