Browse Source

Merge pull request #2670 from DoumanAsh/safe_url_retrieve

[search engine] Exception free url_retrieve
adaptive-webui-19844
sledgehammer999 10 years ago
parent
commit
e502ce38ec
  1. 8
      src/searchengine/nova/helpers.py
  2. 8
      src/searchengine/nova3/helpers.py

8
src/searchengine/nova/helpers.py

@@ -22,7 +22,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
#VERSION: 1.34 #VERSION: 1.40
# Author: # Author:
# Christophe DUMEZ (chris@qbittorrent.org) # Christophe DUMEZ (chris@qbittorrent.org)
@@ -65,7 +65,11 @@ def htmlentitydecode(s):
def retrieve_url(url): def retrieve_url(url):
""" Return the content of the url page as a string """ """ Return the content of the url page as a string """
req = urllib2.Request(url, headers = headers) req = urllib2.Request(url, headers = headers)
response = urllib2.urlopen(req) try:
response = urllib2.urlopen(req)
except urllib2.URLError as errno:
print(" ".join(("Connection error:", str(errno.reason))))
return ""
dat = response.read() dat = response.read()
# Check if it is gzipped # Check if it is gzipped
if dat[:2] == '\037\213': if dat[:2] == '\037\213':

8
src/searchengine/nova3/helpers.py

@@ -22,7 +22,7 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
#VERSION: 1.35 #VERSION: 1.40
# Author: # Author:
# Christophe DUMEZ (chris@qbittorrent.org) # Christophe DUMEZ (chris@qbittorrent.org)
@@ -65,7 +65,11 @@ def htmlentitydecode(s):
def retrieve_url(url): def retrieve_url(url):
""" Return the content of the url page as a string """ """ Return the content of the url page as a string """
req = urllib.request.Request(url, headers = headers) req = urllib.request.Request(url, headers = headers)
response = urllib.request.urlopen(req) try:
response = urllib.request.urlopen(req)
except urllib.error.URLError as errno:
print(" ".join(("Connection error:", str(errno.reason))))
return ""
dat = response.read() dat = response.read()
# Check if it is gzipped # Check if it is gzipped
if dat[:2] == b'\x1f\x8b': if dat[:2] == b'\x1f\x8b':

Loading…
Cancel
Save