
Merge pull request #1951 from masux/master

This fixes qbittorrent/qBittorrent#1949
sledgehammer999 committed 10 years ago
commit 66f46d56f6
  1. src/searchengine/nova/helpers.py (6 changes)
  2. src/searchengine/nova3/helpers.py (6 changes)

src/searchengine/nova/helpers.py

@@ -22,7 +22,7 @@
 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 # POSSIBILITY OF SUCH DAMAGE.
-#VERSION: 1.33
+#VERSION: 1.34
 # Author:
 # Christophe DUMEZ (chris@qbittorrent.org)
@@ -64,7 +64,7 @@ def htmlentitydecode(s):
 def retrieve_url(url):
     """ Return the content of the url page as a string """
-    req = urllib2.Request(url, headers)
+    req = urllib2.Request(url, headers = headers)
     response = urllib2.urlopen(req)
     dat = response.read()
     # Check if it is gzipped
@@ -89,7 +89,7 @@ def download_file(url, referer=None):
     file, path = tempfile.mkstemp()
     file = os.fdopen(file, "w")
     # Download url
-    req = urllib2.Request(url, headers)
+    req = urllib2.Request(url, headers = headers)
     if referer is not None:
         req.add_header('referer', referer)
     response = urllib2.urlopen(req)

src/searchengine/nova3/helpers.py

@@ -22,7 +22,7 @@
 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 # POSSIBILITY OF SUCH DAMAGE.
-#VERSION: 1.34
+#VERSION: 1.35
 # Author:
 # Christophe DUMEZ (chris@qbittorrent.org)
@@ -64,7 +64,7 @@ def htmlentitydecode(s):
 def retrieve_url(url):
     """ Return the content of the url page as a string """
-    req = urllib.request.Request(url, headers)
+    req = urllib.request.Request(url, headers = headers)
     response = urllib.request.urlopen(req)
     dat = response.read()
     # Check if it is gzipped
@@ -90,7 +90,7 @@ def download_file(url, referer=None):
     file, path = tempfile.mkstemp()
     file = os.fdopen(file, "wb")
     # Download url
-    req = urllib.request.Request(url, headers)
+    req = urllib.request.Request(url, headers = headers)
     if referer is not None:
         req.add_header('referer', referer)
     response = urllib.request.urlopen(req)
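The change is the same in both files: in both urllib2.Request and urllib.request.Request the second positional parameter is data, not headers, so passing the headers dict positionally meant it was treated as a request body and the headers were never sent. A minimal sketch of the difference (Python 3, with example.com as a placeholder URL, not code from the plugins themselves):

import urllib.request

url = "https://example.com/"  # placeholder URL for illustration
headers = {'User-Agent': 'Mozilla/5.0'}

# Wrong: the dict binds to the `data` parameter of
# Request(url, data=None, headers={}, ...), so no headers are sent
# and the request silently becomes a POST.
# req = urllib.request.Request(url, headers)

# Right: pass the dict by keyword so it is sent as request headers
# and the request stays a GET.
req = urllib.request.Request(url, headers=headers)
with urllib.request.urlopen(req) as response:
    print(response.status)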
