Mirror of https://github.com/YGGverse/qBt_SE.git
rebase to pathlib

commit 657edba195
parent bce7ee932e
kinozal.py (58 changed lines)
@@ -1,4 +1,4 @@
-# VERSION: 2.4
+# VERSION: 2.5
 # AUTHORS: imDMG [imdmgg@gmail.com]
 
 # Kinozal.tv search engine plugin for qBittorrent
@@ -7,16 +7,15 @@ import base64
 import gzip
 import json
 import logging
-import os
 import re
 import socket
-import tempfile
 import time
-
 from concurrent.futures.thread import ThreadPoolExecutor
 from functools import partial
 from html import unescape
 from http.cookiejar import MozillaCookieJar
+from pathlib import Path
+from tempfile import NamedTemporaryFile
 from urllib.error import URLError, HTTPError
 from urllib.parse import urlencode, unquote
 from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
@@ -25,7 +24,6 @@ from novaprinter import prettyPrinter
 
 # default config
 config = {
-    "version": 3,
     "torrentDate": True,
     "username": "USERNAME",
     "password": "PASSWORD",
@@ -38,24 +36,26 @@ config = {
     "ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0 "
 }
 
+FILE = Path(__file__)
+BASEDIR = FILE.parent.absolute()
 
-def path_to(*file):
-    return os.path.abspath(os.path.join(os.path.dirname(__file__), *file))
+FILENAME = FILE.name[:-3]
+FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in ['.json', '.cookie']]
 
+PAGES = 50
+
 
 def rng(t):
-    return range(1, -(-t // 50))
+    return range(1, -(-t // PAGES))
 
 
 RE_TORRENTS = re.compile(
     r'nam"><a\s+?href="/(.+?)"\s+?class="r\d">(.+?)</a>.+?s\'>.+?s\'>(.+?)<.+?'
-    r'sl_s\'>(\d+?)<.+?sl_p\'>(\d+?)<.+?s\'>(.+?)</td>', re.S)
+    r'sl_s\'>(\d+?)<.+?sl_p\'>(\d+?)<.+?s\'>(.+?)</td>', re.S
+)
 RE_RESULTS = re.compile(r'</span>Найдено\s+?(\d+?)\s+?раздач', re.S)
 PATTERNS = ('%sbrowse.php?s=%s&c=%s', "%s&page=%s")
 
-FILENAME = os.path.basename(__file__)[:-3]
-FILE_J, FILE_C = [path_to(FILENAME + fe) for fe in ['.json', '.cookie']]
-
 # base64 encoded image
 ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAQAQAAAAAAAAAAA"
         "AAAAAAAAAAAACARztMgEc7/4BHO/+ARztMAAAAAIBHO0yhd2n/gEc7/6F3af+ARztMAAAA"
@@ -84,33 +84,24 @@ ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAQAQAAAAAAAAAAA"
 logging.basicConfig(
     format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
     datefmt="%m-%d %H:%M",
-    level=logging.DEBUG)
+    level=logging.DEBUG
+)
 
 logger = logging.getLogger(__name__)
 
 try:
-    # try to load user data from file
-    with open(FILE_J, 'r+') as f:
-        cfg = json.load(f)
-        if "version" not in cfg.keys():
-            cfg.update({"version": 2, "torrentDate": True})
-            f.seek(0)
-            f.write(json.dumps(cfg, indent=4, sort_keys=False))
-            f.truncate()
-        config = cfg
+    config = json.loads(FILE_J.read_text())
     logger.debug("Config is loaded.")
 except OSError as e:
     logger.error(e)
     # if file doesn't exist, we'll create it
-    with open(FILE_J, 'w') as f:
-        f.write(json.dumps(config, indent=4, sort_keys=False))
+    FILE_J.write_text(json.dumps(config, indent=4, sort_keys=False))
     # also write/rewrite ico file
-    with open(path_to(FILENAME + '.ico'), 'wb') as f:
-        f.write(base64.b64decode(ICON))
+    (BASEDIR / (FILENAME + '.ico')).write_bytes(base64.b64decode(ICON))
     logger.debug("Write files.")
 
 
-class kinozal:
+class Kinozal:
     name = 'Kinozal'
     url = 'http://kinozal.tv/'
     url_dl = url.replace("//", "//dl.")
@@ -170,7 +161,7 @@ class kinozal:
             self.pretty_error(what)
             return None
         # do async requests
-        if total > 50:
+        if total > PAGES:
             qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
             with ThreadPoolExecutor(len(qrs)) as executor:
                 executor.map(self.searching, qrs, timeout=30)
@@ -195,10 +186,9 @@ class kinozal:
             path = 'magnet:?xt=urn:btih:' + response.decode()[18:58]
         else:
             # Create a torrent file
-            file, path = tempfile.mkstemp('.torrent')
-            with os.fdopen(file, "wb") as fd:
-                # Write it to a file
+            with NamedTemporaryFile(suffix='.torrent', delete=False) as fd:
                 fd.write(response)
+                path = fd.name
 
         # return magnet link / file path
         logger.debug(path + " " + url)
@@ -214,7 +204,8 @@ class kinozal:
         logger.debug(f"Login. Data before: {form_data}")
         # so we first encode vals to cp1251 then do default decode whole string
         data_encoded = urlencode(
-            {k: v.encode('cp1251') for k, v in form_data.items()}).encode()
+            {k: v.encode('cp1251') for k, v in form_data.items()}
+        ).encode()
         logger.debug(f"Login. Data after: {data_encoded}")
 
         self._catch_error_request(self.url_login, data_encoded)
@@ -311,6 +302,9 @@ class kinozal:
         self.error = None
 
 
+# pep8
+kinozal = Kinozal
+
 if __name__ == "__main__":
     engine = kinozal()
     engine.search('doctor')
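For reference, the module-level setup that these kinozal.py hunks (and the matching hunks in the other plugins below) converge on can be pulled out as a standalone sketch. The constants mirror the added lines above; the __main__ demo and its output comments are illustrative only:

from pathlib import Path

FILE = Path(__file__)              # full path of this module
BASEDIR = FILE.parent.absolute()   # directory the plugin lives in
FILENAME = FILE.name[:-3]          # module name without the ".py" suffix

# companion files sit next to the module, e.g. kinozal.json / kinozal.cookie
FILE_J, FILE_C = [BASEDIR / (FILENAME + ext) for ext in ('.json', '.cookie')]

PAGES = 50  # results per page on the tracker


def rng(t):
    # ceiling division: -(-t // PAGES) == ceil(t / PAGES)
    return range(1, -(-t // PAGES))


if __name__ == "__main__":
    print(FILE_J, FILE_C)   # e.g. .../kinozal.json .../kinozal.cookie
    print(list(rng(120)))   # [1, 2]: two more result pages to request

The short diff that follows (its file header is not shown above) only bumps a version number and the default User-Agent string.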
@@ -1,4 +1,4 @@
-#VERSION: 1.42
+#VERSION: 1.43
 
 # Author:
 # Christophe DUMEZ (chris@qbittorrent.org)
@@ -40,7 +40,7 @@ import urllib.parse
 import urllib.request
 
 # Some sites blocks default python User-agent
-user_agent = 'Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0'
+user_agent = 'Mozilla/5.0 (X11; Linux x86_64; rv:68.0) Gecko/20100101 Firefox/68.0'
 headers = {'User-Agent': user_agent}
 # SOCKS5 Proxy support
 if "sock_proxy" in os.environ and len(os.environ["sock_proxy"].strip()) > 0:
nnmclub.py (61 changed lines)
@@ -1,4 +1,4 @@
-# VERSION: 2.6
+# VERSION: 2.7
 # AUTHORS: imDMG [imdmgg@gmail.com]
 
 # NoNaMe-Club search engine plugin for qBittorrent
@@ -6,14 +6,14 @@
 import base64
 import json
 import logging
-import os
 import re
 import socket
-import tempfile
 import time
 from concurrent.futures import ThreadPoolExecutor
 from html import unescape
 from http.cookiejar import Cookie, MozillaCookieJar
+from pathlib import Path
+from tempfile import NamedTemporaryFile
 from urllib.error import URLError, HTTPError
 from urllib.parse import urlencode, unquote
 from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
@@ -35,25 +35,27 @@ config = {
     "ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0 "
 }
 
+FILE = Path(__file__)
+BASEDIR = FILE.parent.absolute()
 
-def path_to(*file):
-    return os.path.abspath(os.path.join(os.path.dirname(__file__), *file))
+FILENAME = FILE.name[:-3]
+FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in ['.json', '.cookie']]
 
+PAGES = 50
+
 
 def rng(t):
-    return range(50, -(-t // 50) * 50, 50)
+    return range(PAGES, -(-t // PAGES) * PAGES, PAGES)
 
 
 RE_TORRENTS = re.compile(
     r'topictitle"\shref="(.+?)"><b>(.+?)</b>.+?href="(d.+?)".+?<u>(\d+?)</u>.+?'
-    r'<b>(\d+)</b>.+?<b>(\d+)</b>.+?<u>(\d+)</u>', re.S)
+    r'<b>(\d+)</b>.+?<b>(\d+)</b>.+?<u>(\d+)</u>', re.S
+)
 RE_RESULTS = re.compile(r'TP_VER">(?:Результатов\sпоиска:\s(\d{1,3}))?\s', re.S)
 RE_CODE = re.compile(r'name="code"\svalue="(.+?)"', re.S)
 PATTERNS = ('%stracker.php?nm=%s&%s', "%s&start=%s")
 
-FILENAME = os.path.basename(__file__)[:-3]
-FILE_J, FILE_C = [path_to(FILENAME + fe) for fe in ['.json', '.cookie']]
-
 # base64 encoded image
 ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAAAAAAAAAAAAA"
         "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaQicAXRQFADICAQAHAAAA"
@@ -82,33 +84,24 @@ ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAAAAAAAAAAAAA"
 logging.basicConfig(
     format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
     datefmt="%m-%d %H:%M",
-    level=logging.DEBUG)
+    level=logging.DEBUG
+)
 
 logger = logging.getLogger(__name__)
 
 try:
-    # try to load user data from file
-    with open(FILE_J, 'r+') as f:
-        cfg = json.load(f)
-        if "version" not in cfg.keys():
-            cfg.update({"version": 2, "torrentDate": True})
-            f.seek(0)
-            f.write(json.dumps(cfg, indent=4, sort_keys=False))
-            f.truncate()
-        config = cfg
+    config = json.loads(FILE_J.read_text())
     logger.debug("Config is loaded.")
 except OSError as e:
     logger.error(e)
     # if file doesn't exist, we'll create it
-    with open(FILE_J, 'w') as f:
-        f.write(json.dumps(config, indent=4, sort_keys=False))
+    FILE_J.write_text(json.dumps(config, indent=4, sort_keys=False))
     # also write/rewrite ico file
-    with open(path_to(FILENAME + '.ico'), 'wb') as f:
-        f.write(base64.b64decode(ICON))
+    (BASEDIR / (FILENAME + '.ico')).write_bytes(base64.b64decode(ICON))
    logger.debug("Write files.")
 
 
-class nnmclub:
+class NNMClub:
     name = 'NoNaMe-Club'
     url = 'https://nnmclub.to/forum/'
     url_dl = 'https://nnm-club.ws/'
@@ -169,7 +162,7 @@ class nnmclub:
             self.pretty_error(what)
             return None
         # do async requests
-        if total > 50:
+        if total > PAGES:
             qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
             with ThreadPoolExecutor(len(qrs)) as executor:
                 executor.map(self.searching, qrs, timeout=30)
@@ -185,14 +178,12 @@ class nnmclub:
             return None
 
         # Create a torrent file
-        file, path = tempfile.mkstemp('.torrent')
-        with os.fdopen(file, "wb") as fd:
-            # Write it to a file
+        with NamedTemporaryFile(suffix='.torrent', delete=False) as fd:
            fd.write(response)
 
         # return file path
-        logger.debug(path + " " + url)
-        print(path + " " + url)
+        logger.debug(fd.name + " " + url)
+        print(fd.name + " " + url)
 
     def login(self, mcj):
         if self.error:
@@ -214,7 +205,8 @@ class nnmclub:
                      "login": "Вход"}
         # so we first encode vals to cp1251 then do default decode whole string
         data_encoded = urlencode(
-            {k: v.encode('cp1251') for k, v in form_data.items()}).encode()
+            {k: v.encode('cp1251') for k, v in form_data.items()}
+        ).encode()
 
         self._catch_error_request(self.url_login, data_encoded)
         if self.error:
@@ -303,6 +295,9 @@ class nnmclub:
         self.error = None
 
 
+# pep8
+nnmclub = NNMClub
+
 if __name__ == "__main__":
     engine = nnmclub()
     engine.search('doctor')
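The download_torrent hunks in kinozal.py and nnmclub.py swap tempfile.mkstemp() plus os.fdopen() for NamedTemporaryFile(delete=False). A minimal sketch of that idiom, assuming response already holds the raw .torrent bytes fetched elsewhere (the dummy payload in the demo is made up):

from tempfile import NamedTemporaryFile


def save_torrent(response: bytes) -> str:
    """Write raw .torrent bytes to a named temp file and return its path."""
    # delete=False keeps the file on disk after the with-block exits,
    # so qBittorrent can later pick it up by the printed path.
    with NamedTemporaryFile(suffix='.torrent', delete=False) as fd:
        fd.write(response)
        return fd.name


if __name__ == "__main__":
    print(save_torrent(b"d8:announce0:e"))  # dummy bencoded payload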
rutor.py (55 changed lines)
@@ -1,20 +1,18 @@
-# VERSION: 1.1
+# VERSION: 1.2
 # AUTHORS: imDMG [imdmgg@gmail.com]
 
 # Rutor.org search engine plugin for qBittorrent
 
-
 import base64
 import json
 import logging
-import os
 import re
 import socket
-import tempfile
 import time
-
 from concurrent.futures.thread import ThreadPoolExecutor
 from html import unescape
+from pathlib import Path
+from tempfile import NamedTemporaryFile
 from urllib.error import URLError, HTTPError
 from urllib.parse import unquote
 from urllib.request import build_opener, ProxyHandler
@@ -23,7 +21,6 @@ from novaprinter import prettyPrinter
 
 # default config
 config = {
-    "version": 2,
     "torrentDate": True,
     "username": "USERNAME",
     "password": "PASSWORD",
@@ -35,24 +32,26 @@ config = {
     "ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0 "
 }
 
+FILE = Path(__file__)
+BASEDIR = FILE.parent.absolute()
 
-def path_to(*file):
-    return os.path.abspath(os.path.join(os.path.dirname(__file__), *file))
+FILENAME = FILE.name[:-3]
+FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in ['.json', '.cookie']]
 
+PAGES = 100
+
 
 def rng(t):
-    return range(1, -(-t // 100))
+    return range(1, -(-t // PAGES))
 
 
 RE_TORRENTS = re.compile(
     r'(?:gai|tum)"><td>(.+?)</td.+?href="/(torrent/(\d+).+?)">(.+?)</a.+?right"'
-    r'>([.\d]+ \w+)</td.+?alt="S"\s/>(.+?)</s.+?red">(.+?)</s', re.S)
+    r'>([.\d]+ \w+)</td.+?alt="S"\s/>(.+?)</s.+?red">(.+?)</s', re.S
+)
 RE_RESULTS = re.compile(r'</b>\sРезультатов\sпоиска\s(\d{1,4})\s', re.S)
 PATTERNS = ('%ssearch/%i/%i/000/0/%s',)
 
-FILENAME = os.path.basename(__file__)[:-3]
-FILE_J, FILE_C = [path_to(FILENAME + fe) for fe in ['.json', '.cookie']]
-
 # base64 encoded image
 ICON = ("AAABAAEAEBAAAAEAGABoAwAAFgAAACgAAAAQAAAAIAAAAAEAGAAAAAAAAAAAAAAAAAAAAA"
         "AAAAAAAAAAAAAAAc4AAMwHNdcQ4vsN3fYS2fUY3fUe3fMj4fkk4fco4PYo5fgk7f5gp8Zu"
@@ -77,27 +76,24 @@ ICON = ("AAABAAEAEBAAAAEAGABoAwAAFgAAACgAAAAQAAAAIAAAAAEAGAAAAAAAAAAAAAAAAAAAAA"
 logging.basicConfig(
     format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
     datefmt="%m-%d %H:%M",
-    level=logging.DEBUG)
+    level=logging.DEBUG
+)
 
 logger = logging.getLogger(__name__)
 
 try:
-    # try to load user data from file
-    with open(FILE_J, 'r+') as f:
-        config = json.load(f)
+    config = json.loads(FILE_J.read_text())
     logger.debug("Config is loaded.")
 except OSError as e:
     logger.error(e)
     # if file doesn't exist, we'll create it
-    with open(FILE_J, 'w') as f:
-        f.write(json.dumps(config, indent=4, sort_keys=False))
+    FILE_J.write_text(json.dumps(config, indent=4, sort_keys=False))
     # also write/rewrite ico file
-    with open(path_to(FILENAME + '.ico'), 'wb') as f:
-        f.write(base64.b64decode(ICON))
+    (BASEDIR / (FILENAME + '.ico')).write_bytes(base64.b64decode(ICON))
     logger.debug("Write files.")
 
 
-class rutor:
+class Rutor:
     name = 'Rutor'
     url = 'http://rutor.info/'
     url_dl = url.replace("//", "//d.") + "download/"
@@ -142,7 +138,7 @@ class rutor:
             self.pretty_error(what)
             return None
         # do async requests
-        if total > 100:
+        if total > PAGES:
             query = query.replace('h/0', 'h/%i')
             qrs = [query % x for x in rng(total)]
             with ThreadPoolExecutor(len(qrs)) as executor:
@@ -159,14 +155,12 @@ class rutor:
             return None
 
         # Create a torrent file
-        file, path = tempfile.mkstemp('.torrent')
-        with os.fdopen(file, "wb") as fd:
-            # Write it to a file
+        with NamedTemporaryFile(suffix='.torrent', delete=False) as fd:
             fd.write(response)
 
         # return file path
-        logger.debug(path + " " + url)
-        print(path + " " + url)
+        logger.debug(fd.name + " " + url)
+        print(fd.name + " " + url)
 
     def searching(self, query, first=False):
         response = self._catch_error_request(query)
@@ -242,6 +236,9 @@ class rutor:
         self.error = None
 
 
+# pep8
+rutor = Rutor
+
 if __name__ == "__main__":
     engine = rutor()
     engine.search('doctor')
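All four plugins also move their config and icon handling from open() calls to Path.read_text(), Path.write_text() and Path.write_bytes(), as the hunks above show. A self-contained sketch of that load-or-create flow; DEFAULTS, example.json and the truncated ICON string are placeholders, not values from the repository:

import base64
import json
import logging
from pathlib import Path

logging.basicConfig(
    format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
    datefmt="%m-%d %H:%M",
    level=logging.DEBUG
)
logger = logging.getLogger(__name__)

DEFAULTS = {"torrentDate": True, "username": "USERNAME", "password": "PASSWORD"}
BASEDIR = Path(__file__).parent.absolute()
FILE_J = BASEDIR / "example.json"   # placeholder name
ICON = "AAAB"                       # stand-in for the real base64 icon data

try:
    # Path.read_text() replaces open() + json.load()
    config = json.loads(FILE_J.read_text())
    logger.debug("Config is loaded.")
except OSError as e:
    logger.error(e)
    # if the file doesn't exist, create it together with the .ico file
    FILE_J.write_text(json.dumps(DEFAULTS, indent=4, sort_keys=False))
    (BASEDIR / "example.ico").write_bytes(base64.b64decode(ICON))
    logger.debug("Write files.")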
rutracker.py (64 changed lines)
@@ -1,4 +1,4 @@
-# VERSION: 1.3
+# VERSION: 1.4
 # AUTHORS: imDMG [imdmgg@gmail.com]
 
 # rutracker.org search engine plugin for qBittorrent
@@ -6,15 +6,14 @@
 import base64
 import json
 import logging
-import os
 import re
 import socket
-import tempfile
 import time
-
 from concurrent.futures import ThreadPoolExecutor
 from html import unescape
 from http.cookiejar import Cookie, MozillaCookieJar
+from pathlib import Path
+from tempfile import NamedTemporaryFile
 from urllib.error import URLError, HTTPError
 from urllib.parse import urlencode, unquote
 from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
@@ -23,7 +22,6 @@ from novaprinter import prettyPrinter
 
 # default config
 config = {
-    "version": 2,
     "torrentDate": True,
     "username": "USERNAME",
     "password": "PASSWORD",
@@ -35,25 +33,27 @@ config = {
     "ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0 "
 }
 
+FILE = Path(__file__)
+BASEDIR = FILE.parent.absolute()
 
-def path_to(*file):
-    return os.path.abspath(os.path.join(os.path.dirname(__file__), *file))
+FILENAME = FILE.name[:-3]
+FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in ['.json', '.cookie']]
 
+PAGES = 50
+
 
 def rng(t):
-    return range(50, -(-t // 50) * 50, 50)
+    return range(PAGES, -(-t // PAGES) * PAGES, PAGES)
 
 
 RE_TORRENTS = re.compile(
     r'data-topic_id="(\d+?)".+?">(.+?)</a.+?tor-size"\sdata-ts_text="(\d+?)">'
     r'.+?data-ts_text="([-0-9]+?)">.+?Личи">(\d+?)</.+?data-ts_text="(\d+?)">',
-    re.S)
+    re.S
+)
 RE_RESULTS = re.compile(r'Результатов\sпоиска:\s(\d{1,3})\s<span', re.S)
 PATTERNS = ('%s/tracker.php?nm=%s&c=%s', "%s&start=%s")
 
-FILENAME = os.path.basename(__file__)[:-3]
-FILE_J, FILE_C = [path_to(FILENAME + fl) for fl in ['.json', '.cookie']]
-
 # base64 encoded image
 ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAAAABMLAAATCw"
         "AAAAAAAAAAAAAAAAAAAAAAAAAAAABs3wUAY8wFBGPMBQN2sw8A9kA6AOdOOl/nTjo/5046"
@@ -82,40 +82,30 @@ ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAAAABMLAAATCw"
 logging.basicConfig(
     format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
     datefmt="%m-%d %H:%M",
-    level=logging.DEBUG)
+    level=logging.DEBUG
+)
 
 logger = logging.getLogger(__name__)
 
 try:
-    # try to load user data from file
-    with open(FILE_J, 'r+') as f:
-        config = json.load(f)
+    config = json.loads(FILE_J.read_text())
     logger.debug("Config is loaded.")
 except OSError as e:
     logger.error(e)
     # if file doesn't exist, we'll create it
-    with open(FILE_J, 'w') as f:
-        f.write(json.dumps(config, indent=4, sort_keys=False))
+    FILE_J.write_text(json.dumps(config, indent=4, sort_keys=False))
     # also write/rewrite ico file
-    with open(path_to(FILENAME + '.ico'), 'wb') as f:
-        f.write(base64.b64decode(ICON))
+    (BASEDIR / (FILENAME + '.ico')).write_bytes(base64.b64decode(ICON))
     logger.debug("Write files.")
 
 
-class rutracker:
+class Rutracker:
     name = 'Rutracker'
     url = 'https://rutracker.org/forum/'
     url_dl = url + 'dl.php?t='
     url_login = url + 'login.php'
     supported_categories = {'all': '-1'}
 
-    # 'movies': '2',
-    # 'tv': '3',
-    # 'music': '4',
-    # 'games': '5',
-    # 'anime': '6',
-    # 'software': '7'}
-
     def __init__(self):
         # error message
         self.error = None
@@ -163,7 +153,7 @@ class rutracker:
             self.pretty_error(what)
             return None
         # do async requests
-        if total > 50:
+        if total > PAGES:
             qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
             with ThreadPoolExecutor(len(qrs)) as executor:
                 executor.map(self.searching, qrs, timeout=30)
@@ -179,14 +169,12 @@ class rutracker:
             return None
 
         # Create a torrent file
-        file, path = tempfile.mkstemp('.torrent')
-        with os.fdopen(file, "wb") as fd:
-            # Write it to a file
+        with NamedTemporaryFile(suffix='.torrent', delete=False) as fd:
             fd.write(response)
 
         # return file path
-        logger.debug(path + " " + url)
-        print(path + " " + url)
+        logger.debug(fd.name + " " + url)
+        print(fd.name + " " + url)
 
     def login(self, mcj):
         if self.error:
@@ -203,7 +191,8 @@ class rutracker:
         logger.debug(f"Login. Data before: {form_data}")
         # so we first encode vals to cp1251 then do default decode whole string
         data_encoded = urlencode(
-            {k: v.encode('cp1251') for k, v in form_data.items()}).encode()
+            {k: v.encode('cp1251') for k, v in form_data.items()}
+        ).encode()
         logger.debug(f"Login. Data after: {data_encoded}")
         self._catch_error_request(self.url_login, data_encoded)
         if self.error:
@@ -293,6 +282,9 @@ class rutracker:
         self.error = None
 
 
+# pep8
+rutracker = Rutracker
+
 if __name__ == "__main__":
     engine = rutracker()
     engine.search('doctor')
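The login hunks above only reflow the cp1251 form encoding across lines, but the trick itself is easy to miss: each value is encoded to cp1251 before urlencode(), so Cyrillic fields such as "Вход" are percent-encoded in the legacy charset these trackers expect rather than UTF-8. A standalone sketch with dummy credentials:

from urllib.parse import urlencode

form_data = {"username": "user", "password": "secret", "login": "Вход"}

# encode values to cp1251 first; urlencode() then percent-encodes those raw
# bytes, and the final .encode() yields the bytes body for the POST request
data_encoded = urlencode(
    {k: v.encode('cp1251') for k, v in form_data.items()}
).encode()

print(data_encoded)  # b'username=user&password=secret&login=%C2%F5%EE%E4'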