 
 from io import StringIO
 from datetime import datetime
-import sys, os, signal, logging
-from urllib.error import HTTPError
+import sys, os, signal, logging, socket
+from urllib.error import HTTPError, URLError
 from urllib.request import Request, urlopen
 
 
 # ================ constants
 PROJ = {'name' : 'pyarxiver',
-        'version' : '1.00',
+        'version' : '1.01',
         'author' : 'compy',
         'page' : 'https://github.com/com-py/',
         'license' : 'https://creativecommons.org/licenses/by-nc-sa/4.0/',
@@ -64,10 +64,13 @@ def download_data(url, header=False): # download data as bytes
     while retry < retries:
         retry += 1
         try:
-            data = urlopen(req, timeout=10).read()
-        except HTTPError: # session likely expired
+            data = urlopen(req).read()
+        except socket.timeout as error: # session likely expired
             if retry < retries:
-                msg.info('http error ... {}'.format(url[-10:]))
+                msg.info('timeout error ... {}, frag {} '.format(error, count))
+        except (HTTPError, URLError) as error: # session likely expired
+            if retry < retries:
+                msg.info('http error ... {}, frag {} '.format(error, count))
         else: # successful
             success = True
             break
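For context, the second hunk sits inside a retry loop in download_data(), but names such as req, retries, msg, and count are defined elsewhere in the file and are not visible in this diff. Below is a minimal, self-contained sketch of that retry pattern under assumed definitions for those names; it illustrates how the new socket.timeout branch and the combined (HTTPError, URLError) branch separate a stalled connection from a server-side failure, and it is not the file's actual implementation. The sketch passes an explicit timeout=10 to urlopen() so that socket.timeout can actually be raised; the commit itself drops that argument, which suggests a timeout is configured elsewhere in the file.

# Sketch only: retry a download, logging timeouts and HTTP/URL errors separately.
# retries, count, and msg mirror names from the hunk; their real definitions are
# not part of this diff, so the values below are assumptions.
import logging, socket
from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen

logging.basicConfig(level=logging.INFO)
msg = logging.getLogger('pyarxiver')                    # assumed logger object

def fetch(url, retries=3, count=0):
    """Return response bytes, or None after exhausting retries."""
    req = Request(url, headers={'User-Agent': 'Mozilla/5.0'})   # assumed request setup
    data, success, retry = None, False, 0
    while retry < retries:
        retry += 1
        try:
            data = urlopen(req, timeout=10).read()      # explicit timeout for the sketch
        except socket.timeout as error:                 # stalled or very slow connection
            if retry < retries:
                msg.info('timeout error ... {}, frag {} '.format(error, count))
        except (HTTPError, URLError) as error:          # bad status code or unreachable host
            if retry < retries:
                msg.info('http error ... {}, frag {} '.format(error, count))
        else:                                           # successful
            success = True
            break
    return data if success else None

One note on the split handling: in CPython's urllib, a timeout during connection setup is typically re-raised wrapped in URLError, so the socket.timeout branch mainly catches timeouts that occur while reading the response body; both cases still count against the same retry budget.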
|