|
12 | 12 | import urllib.error
|
13 | 13 | import urllib.request
|
14 | 14 | import zipfile
|
15 |
| -from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union |
| 15 | +from typing import Any, Callable, Dict, IO, Iterable, List, Optional, Tuple, TypeVar, Union |
16 | 16 | from urllib.parse import urlparse
|
17 | 17 |
|
18 | 18 | import numpy as np
|
|
24 | 24 | USER_AGENT = "pytorch/vision"
|
25 | 25 |
|
26 | 26 |
|
27 |
| -def _save_response_content( |
28 |
| - content: Iterator[bytes], |
29 |
| - destination: Union[str, pathlib.Path], |
30 |
| - length: Optional[int] = None, |
31 |
| -) -> None: |
32 |
| - with open(destination, "wb") as fh, tqdm(total=length) as pbar: |
33 |
| - for chunk in content: |
34 |
| - # filter out keep-alive new chunks |
35 |
| - if not chunk: |
36 |
| - continue |
37 |
| - |
38 |
| - fh.write(chunk) |
39 |
| - pbar.update(len(chunk)) |
40 |
| - |
41 |
| - |
def _urlretrieve(url: str, filename: Union[str, pathlib.Path], chunk_size: int = 1024 * 32) -> None:
    """Stream the resource at ``url`` into ``filename`` with a tqdm progress bar.

    The body is read in ``chunk_size``-byte pieces, so arbitrarily large
    files are downloaded without being held in memory.

    Args:
        url: URL of the resource to download.
        filename: Destination path for the downloaded bytes.
        chunk_size: Number of bytes fetched per read (default 32 KiB).
    """
    # Some hosts reject urllib's default User-Agent, so send our own.
    request = urllib.request.Request(url, headers={"User-Agent": USER_AGENT})
    # NOTE: response.length may be None when the server omits Content-Length;
    # tqdm accepts total=None and simply shows an unbounded counter.
    with urllib.request.urlopen(request) as response, open(filename, "wb") as fh, tqdm(
        total=response.length
    ) as pbar:
        chunk = response.read(chunk_size)
        while chunk:
            fh.write(chunk)
            pbar.update(len(chunk))
            chunk = response.read(chunk_size)
45 | 33 |
|
46 | 34 |
|
47 | 35 | def calculate_md5(fpath: Union[str, pathlib.Path], chunk_size: int = 1024 * 1024) -> str:
|
|
0 commit comments