From d5209d7ddf3cf1536ab85d9c157cd17a1bb2974e Mon Sep 17 00:00:00 2001 From: Douman Date: Wed, 10 Feb 2016 16:02:50 +0300 Subject: [PATCH] Align the search engines' URL retrieval mechanism. Closes #4778 1. Switch to retrieve_url instead of low-level HTTPConnection module usage --- src/searchengine/nova/engines/demonoid.py | 21 ++++---------- src/searchengine/nova/engines/extratorrent.py | 22 +++++---------- src/searchengine/nova/engines/mininova.py | 22 +++++---------- src/searchengine/nova/engines/piratebay.py | 28 ++++++------------- src/searchengine/nova/engines/versions.txt | 8 +++--- src/searchengine/nova3/engines/demonoid.py | 21 ++++---------- .../nova3/engines/extratorrent.py | 22 +++++---------- src/searchengine/nova3/engines/mininova.py | 22 +++++---------- src/searchengine/nova3/engines/piratebay.py | 22 +++++---------- src/searchengine/nova3/engines/versions.txt | 8 +++--- 10 files changed, 64 insertions(+), 132 deletions(-) diff --git a/src/searchengine/nova/engines/demonoid.py b/src/searchengine/nova/engines/demonoid.py index b1a549a27..f10e401a5 100644 --- a/src/searchengine/nova/engines/demonoid.py +++ b/src/searchengine/nova/engines/demonoid.py @@ -1,4 +1,4 @@ -#VERSION: 1.1 +#VERSION: 1.2 #AUTHORS: Douman (custparasite@gmx.se) #CONTRIBUTORS: Diego de las Heras (ngosang@hotmail.es) @@ -27,13 +27,12 @@ # POSSIBILITY OF SUCH DAMAGE. 
from HTMLParser import HTMLParser -from httplib import HTTPSConnection as https from re import compile as re_compile from re import DOTALL from itertools import islice #qBt from novaprinter import prettyPrinter -from helpers import download_file +from helpers import download_file, retrieve_url class demonoid(object): """ Search engine class """ @@ -120,18 +119,12 @@ class demonoid(object): def search(self, what, cat='all'): """ Performs search """ - connection = https("www.demonoid.pw") - #prepare query cat = self.supported_categories[cat.lower()] - query = "".join(("/files/?category=", cat, "&subcategory=All&quality=All&seeded=2&external=2&query=", what, "&to=1&uid=0&sort=S")) + query = "".join((self.url, "/files/?category=", cat, "&subcategory=All&quality=All&seeded=2&external=2&query=", what, "&to=1&uid=0&sort=S")) - connection.request("GET", query) - response = connection.getresponse() - if response.status != 200: - return + data = retrieve_url(query) - data = response.read().decode("utf-8") add_res_list = re_compile("/files.*page=[0-9]+") torrent_list = re_compile("start torrent list -->(.*)(.*)