
Better error handling

Branch: master
imDMG committed 1 year ago
Commit: 4b43b63ef3
1. README.md (6 lines changed)
2. engines/kinozal.py (238 lines changed)
3. engines/rutor.py (178 lines changed)
4. engines/rutracker.py (248 lines changed)
5. settings_gui.py (4 lines changed)

README.md (6 lines changed)

@@ -1,13 +1,13 @@
 [![Python 3.7+](https://img.shields.io/badge/python-%3E%3D%20v3.7-blue)](https://www.python.org/downloads/release/python-370/)
 # qBittorrent plugins
-## Rutracker.org ![v1.7](https://img.shields.io/badge/v1.7-blue)
+## Rutracker.org ![v1.8](https://img.shields.io/badge/v1.8-blue)
 Biggest russian torrent tracker.
-## Rutor.org ![v1.6](https://img.shields.io/badge/v1.6-blue)
+## Rutor.org ![v1.7](https://img.shields.io/badge/v1.7-blue)
 Popular free russian torrent tracker.
-## Kinozal.tv ![v2.10](https://img.shields.io/badge/v2.10-blue)
+## Kinozal.tv ![v2.11](https://img.shields.io/badge/v2.11-blue)
 Russian torrent tracker mostly directed on movies, but have other categories.
 The site has a restriction on downloading torrent files (10 by default or so), so I added the ability to open the magnet link instead the file.

engines/kinozal.py (238 lines changed)

@@ -1,4 +1,4 @@
-# VERSION: 2.10
+# VERSION: 2.11
 # AUTHORS: imDMG [imdmgg@gmail.com]
 
 # Kinozal.tv search engine plugin for qBittorrent
@@ -17,7 +17,7 @@ from html import unescape
 from http.cookiejar import MozillaCookieJar
 from pathlib import Path
 from tempfile import NamedTemporaryFile
-from typing import Union, Optional
+from typing import Callable
 from urllib.error import URLError, HTTPError
 from urllib.parse import urlencode, unquote
 from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
@@ -43,8 +43,6 @@ BASEDIR = FILE.parent.absolute()
 FILENAME = FILE.stem
 FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in (".json", ".cookie")]
-PAGES = 50
-
 RE_TORRENTS = re.compile(
     r'nam"><a\s+?href="/(.+?)"\s+?class="r\d">(.+?)</a>.+?s\'>.+?s\'>(.+?)<.+?'
     r'sl_s\'>(\d+?)<.+?sl_p\'>(\d+?)<.+?s\'>(.+?)</td>', re.S
@@ -52,6 +50,8 @@ RE_TORRENTS = re.compile(
 RE_RESULTS = re.compile(r"</span>Найдено\s+?(\d+?)\s+?раздач", re.S)
 PATTERNS = ("%sbrowse.php?s=%s&c=%s", "%s&page=%s")
+PAGES = 50
+
 # base64 encoded image
 ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAQAQAAAAAAAAAAA"
         "AAAAAAAAAAAACARztMgEc7/4BHO/+ARztMAAAAAIBHO0yhd2n/gEc7/6F3af+ARztMAAAA"
@@ -81,6 +81,10 @@ def rng(t: int) -> range:
     return range(1, -(-t // PAGES))
 
 
+class EngineError(Exception):
+    ...
+
+
 @dataclass
 class Config:
     username: str = "USERNAME"
@@ -145,88 +149,19 @@ class Kinozal:
                             "anime": "20",
                             "software": "32"}
 
-    # error message
-    error: Optional[str] = None
     # cookies
     mcj = MozillaCookieJar()
     # establish connection
     session = build_opener(HTTPCookieProcessor(mcj))
 
-    def __init__(self):
-        # add proxy handler if needed
-        if config.proxy:
-            if any(config.proxies.values()):
-                self.session.add_handler(ProxyHandler(config.proxies))
-                logger.debug("Proxy is set!")
-            else:
-                self.error = "Proxy enabled, but not set!"
-
-        # change user-agent
-        self.session.addheaders = [("User-Agent", config.ua)]
-
-        # load local cookies
-        try:
-            self.mcj.load(FILE_C, ignore_discard=True)
-            if "uid" in [cookie.name for cookie in self.mcj]:
-                # if cookie.expires < int(time.time())
-                logger.info("Local cookies is loaded")
-            else:
-                logger.info("Local cookies expired or bad")
-                logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
-                self.mcj.clear()
-                self.login()
-        except FileNotFoundError:
-            self.login()
-
     def search(self, what: str, cat: str = "all") -> None:
-        if self.error:
-            self.pretty_error(what)
-            return None
-        query = PATTERNS[0] % (self.url, what.replace(" ", "+"),
-                               self.supported_categories[cat])
-        # make first request (maybe it enough)
-        t0, total = time.time(), self.searching(query, True)
-        if self.error:
-            self.pretty_error(what)
-            return None
-        # do async requests
-        if total > PAGES:
-            qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
-            with ThreadPoolExecutor(len(qrs)) as executor:
-                executor.map(self.searching, qrs, timeout=30)
-
-        logger.debug(f"--- {time.time() - t0} seconds ---")
-        logger.info(f"Found torrents: {total}")
+        self._catch_errors(self._search, what, cat)
 
     def download_torrent(self, url: str) -> None:
-        # choose download method
-        if config.magnet:
-            url = "%sget_srv_details.php?action=2&id=%s" % (self.url,
-                                                            url.split("=")[1])
-        response = self._request(url)
-        if self.error:
-            self.pretty_error(url)
-            return None
-        if config.magnet:
-            if response.startswith(b"\x1f\x8b\x08"):
-                response = gzip.decompress(response)
-            path = "magnet:?xt=urn:btih:" + response.decode()[18:58]
-        else:
-            # Create a torrent file
-            with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
-                fd.write(response)
-                path = fd.name
-        # return magnet link / file path
-        logger.debug(path + " " + url)
-        print(path + " " + url)
+        self._catch_errors(self._download_torrent, url)
 
     def login(self) -> None:
-        if self.error:
-            return None
+        self.mcj.clear()
         form_data = {"username": config.username, "password": config.password}
         logger.debug(f"Login. Data before: {form_data}")
@@ -235,20 +170,16 @@ class Kinozal:
         logger.debug(f"Login. Data after: {data_encoded}")
 
         self._request(self.url_login, data_encoded)
-        if self.error:
-            return None
 
         logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
-        if "uid" in [cookie.name for cookie in self.mcj]:
-            self.mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
-            logger.info("We successfully authorized")
-        else:
-            self.error = "We not authorized, please check your credentials!"
-            logger.warning(self.error)
+        if "uid" not in [cookie.name for cookie in self.mcj]:
+            raise EngineError(
+                "We not authorized, please check your credentials!"
+            )
+        self.mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
+        logger.info("We successfully authorized")
 
-    def searching(self, query: str, first: bool = False) -> Union[None, int]:
+    def searching(self, query: str, first: bool = False) -> int:
         response = self._request(query)
-        if self.error:
-            return None
         if response.startswith(b"\x1f\x8b\x08"):
             response = gzip.decompress(response)
         page, torrents_found = response.decode("cp1251"), -1
@@ -256,17 +187,13 @@ class Kinozal:
             # check login status
             if "Гость! ( Зарегистрируйтесь )" in page:
                 logger.debug("Looks like we lost session id, lets login")
-                self.mcj.clear()
                 self.login()
-                if self.error:
-                    return None
             # firstly we check if there is a result
-            result = RE_RESULTS.search(page)
-            if not result:
-                self.error = "Unexpected page content"
-                return None
-            torrents_found = int(result[1])
-            if not torrents_found:
+            try:
+                torrents_found = int(RE_RESULTS.search(page)[1])
+            except TypeError:
+                raise EngineError("Unexpected page content")
+            if torrents_found <= 0:
                 return 0
         self.draw(page)
@@ -276,6 +203,8 @@ class Kinozal:
         _part = partial(time.strftime, "%y.%m.%d")
         # yeah this is yesterday
         yesterday = _part(time.localtime(time.time() - 86400))
+        # replace size units
+        table = {"Т": "T", "Г": "G", "М": "M", "К": "K", "Б": "B"}
         for tor in RE_TORRENTS.findall(html):
             torrent_date = ""
             if config.torrent_date:
@@ -288,9 +217,6 @@ class Kinozal:
                     torrent_date = _part(time.strptime(ct, "%d.%m.%Y"))
                 torrent_date = f"[{torrent_date}] "
 
-            # replace size units
-            table = {"Т": "T", "Г": "G", "М": "M", "К": "K", "Б": "B"}
-
             prettyPrinter({
                 "engine_url": self.url,
                 "desc_link": self.url + tor[0],
@@ -301,40 +227,108 @@ class Kinozal:
                 "leech": tor[4]
             })
 
+    def _catch_errors(self, handler: Callable, *args: str):
+        try:
+            self._init()
+            handler(*args)
+        except EngineError as ex:
+            self.pretty_error(args[0], str(ex))
+        except Exception as ex:
+            self.pretty_error(args[0], "Unexpected error, please check logs")
+            logger.exception(ex)
+
+    def _init(self) -> None:
+        # add proxy handler if needed
+        if config.proxy:
+            if not any(config.proxies.values()):
+                raise EngineError("Proxy enabled, but not set!")
+            self.session.add_handler(ProxyHandler(config.proxies))
+            logger.debug("Proxy is set!")
+
+        # change user-agent
+        self.session.addheaders = [("User-Agent", config.ua)]
+
+        # load local cookies
+        try:
+            self.mcj.load(FILE_C, ignore_discard=True)
+            if "uid" in [cookie.name for cookie in self.mcj]:
+                # if cookie.expires < int(time.time())
+                return logger.info("Local cookies is loaded")
+            logger.info("Local cookies expired or bad, try to login")
+            logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
+        except FileNotFoundError:
+            logger.info("Local cookies not exists, try to login")
+        self.login()
+
+    def _search(self, what: str, cat: str = "all") -> None:
+        query = PATTERNS[0] % (self.url, what.replace(" ", "+"),
+                               self.supported_categories[cat])
+        # make first request (maybe it enough)
+        t0, total = time.time(), self.searching(query, True)
+        # do async requests
+        if total > PAGES:
+            qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
+            with ThreadPoolExecutor(len(qrs)) as executor:
+                executor.map(self.searching, qrs, timeout=30)
+
+        logger.debug(f"--- {time.time() - t0} seconds ---")
+        logger.info(f"Found torrents: {total}")
+
+    def _download_torrent(self, url: str) -> None:
+        # choose download method
+        if config.magnet:
+            url = "%sget_srv_details.php?action=2&id=%s" % (self.url,
+                                                            url.split("=")[1])
+        response = self._request(url)
+        if config.magnet:
+            if response.startswith(b"\x1f\x8b\x08"):
+                response = gzip.decompress(response)
+            path = "magnet:?xt=urn:btih:" + response.decode()[18:58]
+        else:
+            # Create a torrent file
+            with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
+                fd.write(response)
+                path = fd.name
+        # return magnet link / file path
+        logger.debug(path + " " + url)
+        print(path + " " + url)
+
     def _request(
-            self, url: str, data: Optional[bytes] = None, repeated: bool = False
-    ) -> Union[bytes, None]:
+            self, url: str, data: bytes = None, repeated: bool = False
+    ) -> bytes:
         try:
             with self.session.open(url, data, 5) as r:
                 # checking that tracker isn't blocked
                 if r.geturl().startswith((self.url, self.url_dl)):
                     return r.read()
-            self.error = f"{url} is blocked. Try another proxy."
+            raise EngineError(f"{url} is blocked. Try another proxy.")
         except (URLError, HTTPError) as err:
-            logger.error(err.reason)
             error = str(err.reason)
+            reason = f"{url} is not response! Maybe it is blocked."
             if "timed out" in error and not repeated:
-                logger.debug("Repeating request...")
+                logger.debug("Request timed out. Repeating...")
                 return self._request(url, data, True)
             if "no host given" in error:
-                self.error = "Proxy is bad, try another!"
+                reason = "Proxy is bad, try another!"
             elif hasattr(err, "code"):
-                self.error = f"Request to {url} failed with status: {err.code}"
-            else:
-                self.error = f"{url} is not response! Maybe it is blocked."
-
-        return None
+                reason = f"Request to {url} failed with status: {err.code}"
+
+            raise EngineError(reason)
 
-    def pretty_error(self, what: str) -> None:
-        prettyPrinter({"engine_url": self.url,
-                       "desc_link": "https://github.com/imDMG/qBt_SE",
-                       "name": f"[{unquote(what)}][Error]: {self.error}",
-                       "link": self.url + "error",
-                       "size": "1 TB",  # lol
-                       "seeds": 100,
-                       "leech": 100})
-
-        self.error = None
+    def pretty_error(self, what: str, error: str) -> None:
+        prettyPrinter({
+            "engine_url": self.url,
+            "desc_link": "https://github.com/imDMG/qBt_SE",
+            "name": f"[{unquote(what)}][Error]: {error}",
+            "link": self.url + "error",
+            "size": "1 TB",  # lol
+            "seeds": 100,
+            "leech": 100
+        })
 
 # pep8
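
All three engines now share the same dispatch pattern: the public search() and download_torrent() methods hand their private counterparts to _catch_errors(), expected failures are raised as EngineError, and anything else is logged and reported with a generic message. Below is a minimal standalone sketch of that flow; DemoEngine, its always-failing _search() and the print()-based pretty_error() are illustrative stand-ins, not code from the plugins.

import logging
from typing import Callable

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)


class EngineError(Exception):
    ...


class DemoEngine:
    url = "https://example.org/"

    def search(self, what: str, cat: str = "all") -> None:
        # public entry point stays a one-liner
        self._catch_errors(self._search, what, cat)

    def _catch_errors(self, handler: Callable, *args: str) -> None:
        try:
            handler(*args)
        except EngineError as ex:
            # expected failure: shown to the user as a pseudo search result
            self.pretty_error(args[0], str(ex))
        except Exception as ex:
            # anything unexpected: generic message for the UI, details to the log
            self.pretty_error(args[0], "Unexpected error, please check logs")
            logger.exception(ex)

    def _search(self, what: str, cat: str = "all") -> None:
        # always fails, just to demonstrate the EngineError path
        raise EngineError("Proxy enabled, but not set!")

    def pretty_error(self, what: str, error: str) -> None:
        print(f"[{what}][Error]: {error}")


DemoEngine().search("ubuntu")  # -> [ubuntu][Error]: Proxy enabled, but not set!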

engines/rutor.py (178 lines changed)

@@ -1,4 +1,4 @@
-# VERSION: 1.6
+# VERSION: 1.7
 # AUTHORS: imDMG [imdmgg@gmail.com]
 
 # Rutor.org search engine plugin for qBittorrent
@@ -14,7 +14,7 @@ from dataclasses import dataclass, field
 from html import unescape
 from pathlib import Path
 from tempfile import NamedTemporaryFile
-from typing import Optional, Union
+from typing import Callable
 from urllib.error import URLError, HTTPError
 from urllib.parse import unquote
 from urllib.request import build_opener, ProxyHandler
@@ -31,12 +31,6 @@ BASEDIR = FILE.parent.absolute()
 FILENAME = FILE.stem
 FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in (".json", ".cookie")]
-PAGES = 100
-
-
-def rng(t: int) -> range:
-    return range(1, -(-t // PAGES))
-
 
 RE_TORRENTS = re.compile(
     r'(?:gai|tum)"><td>(.+?)</td.+?href="(magnet:.+?)".+?href="/'
@@ -46,6 +40,8 @@ RE_TORRENTS = re.compile(
 RE_RESULTS = re.compile(r"</b>\sРезультатов\sпоиска\s(\d{1,4})\s", re.S)
 PATTERNS = ("%ssearch/%i/%i/000/0/%s",)
+PAGES = 100
+
 # base64 encoded image
 ICON = ("AAABAAEAEBAAAAEAGABoAwAAFgAAACgAAAAQAAAAIAAAAAEAGAAAAAAAAAAAAAAAAAAAAA"
         "AAAAAAAAAAAAAAAc4AAMwHNdcQ4vsN3fYS2fUY3fUe3fMj4fkk4fco4PYo5fgk7f5gp8Zu"
@@ -76,6 +72,14 @@ logging.basicConfig(
 logger = logging.getLogger(__name__)
 
 
+def rng(t: int) -> range:
+    return range(1, -(-t // PAGES))
+
+
+class EngineError(Exception):
+    ...
+
+
 @dataclass
 class Config:
     # username: str = "USERNAME"
@@ -141,73 +145,24 @@ class Rutor:
                             "pictures": 3,
                             "books": 11}
 
-    # error message
-    error: Optional[str] = None
     # establish connection
     session = build_opener()
 
-    def __init__(self):
-        # add proxy handler if needed
-        if config.proxy:
-            if any(config.proxies.values()):
-                self.session.add_handler(ProxyHandler(config.proxies))
-                logger.debug("Proxy is set!")
-            else:
-                self.error = "Proxy enabled, but not set!"
-
-        # change user-agent
-        self.session.addheaders = [("User-Agent", config.ua)]
-
     def search(self, what: str, cat: str = "all") -> None:
-        if self.error:
-            self.pretty_error(what)
-            return None
-        query = PATTERNS[0] % (self.url, 0, self.supported_categories[cat],
-                               what.replace(" ", "+"))
-        # make first request (maybe it enough)
-        t0, total = time.time(), self.searching(query, True)
-        if self.error:
-            self.pretty_error(what)
-            return None
-        # do async requests
-        if total > PAGES:
-            query = query.replace("h/0", "h/{}")
-            qrs = [query.format(x) for x in rng(total)]
-            with ThreadPoolExecutor(len(qrs)) as executor:
-                executor.map(self.searching, qrs, timeout=30)
-
-        logger.debug(f"--- {time.time() - t0} seconds ---")
-        logger.info(f"Found torrents: {total}")
+        self._catch_errors(self._search, what, cat)
 
     def download_torrent(self, url: str) -> None:
-        # Download url
-        response = self._request(url)
-        if self.error:
-            self.pretty_error(url)
-            return None
-        # Create a torrent file
-        with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
-            fd.write(response)
-        # return file path
-        logger.debug(fd.name + " " + url)
-        print(fd.name + " " + url)
-
-    def searching(self, query: str, first: bool = False) -> Union[None, int]:
-        response = self._request(query)
-        if self.error:
-            return None
-        page, torrents_found = response.decode(), -1
+        self._catch_errors(self._download_torrent, url)
+
+    def searching(self, query: str, first: bool = False) -> int:
+        page, torrents_found = self._request(query).decode(), -1
         if first:
             # firstly we check if there is a result
-            result = RE_RESULTS.search(page)
-            if not result:
-                self.error = "Unexpected page content"
-                return None
-            torrents_found = int(result[1])
-            if not torrents_found:
+            try:
+                torrents_found = int(RE_RESULTS.search(page)[1])
+            except TypeError:
+                raise EngineError("Unexpected page content")
+            if torrents_found <= 0:
                 return 0
         self.draw(page)
@@ -235,40 +190,87 @@ class Rutor:
                 "leech": unescape(tor[7])
             })
 
+    def _catch_errors(self, handler: Callable, *args: str):
+        try:
+            self._init()
+            handler(*args)
+        except EngineError as ex:
+            self.pretty_error(args[0], str(ex))
+        except Exception as ex:
+            self.pretty_error(args[0], "Unexpected error, please check logs")
+            logger.exception(ex)
+
+    def _init(self) -> None:
+        # add proxy handler if needed
+        if config.proxy:
+            if not any(config.proxies.values()):
+                raise EngineError("Proxy enabled, but not set!")
+            self.session.add_handler(ProxyHandler(config.proxies))
+            logger.debug("Proxy is set!")
+
+        # change user-agent
+        self.session.addheaders = [("User-Agent", config.ua)]
+
+    def _search(self, what: str, cat: str = "all") -> None:
+        query = PATTERNS[0] % (self.url, 0, self.supported_categories[cat],
+                               what.replace(" ", "+"))
+        # make first request (maybe it enough)
+        t0, total = time.time(), self.searching(query, True)
+        # do async requests
+        if total > PAGES:
+            query = query.replace("h/0", "h/{}")
+            qrs = [query.format(x) for x in rng(total)]
+            with ThreadPoolExecutor(len(qrs)) as executor:
+                executor.map(self.searching, qrs, timeout=30)
+
+        logger.debug(f"--- {time.time() - t0} seconds ---")
+        logger.info(f"Found torrents: {total}")
+
+    def _download_torrent(self, url: str) -> None:
+        # Download url
+        response = self._request(url)
+        # Create a torrent file
+        with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
+            fd.write(response)
+        # return file path
+        logger.debug(fd.name + " " + url)
+        print(fd.name + " " + url)
+
     def _request(
-            self, url: str, data: Optional[bytes] = None, repeated: bool = False
-    ) -> Union[bytes, None]:
+            self, url: str, data: bytes = None, repeated: bool = False
+    ) -> bytes:
         try:
             with self.session.open(url, data, 5) as r:
                 # checking that tracker isn't blocked
                 if r.geturl().startswith((self.url, self.url_dl)):
                     return r.read()
-            self.error = f"{url} is blocked. Try another proxy."
+            raise EngineError(f"{url} is blocked. Try another proxy.")
         except (URLError, HTTPError) as err:
-            logger.error(err.reason)
             error = str(err.reason)
+            reason = f"{url} is not response! Maybe it is blocked."
             if "timed out" in error and not repeated:
-                logger.debug("Repeating request...")
+                logger.debug("Request timed out. Repeating...")
                 return self._request(url, data, True)
             if "no host given" in error:
-                self.error = "Proxy is bad, try another!"
+                reason = "Proxy is bad, try another!"
             elif hasattr(err, "code"):
-                self.error = f"Request to {url} failed with status: {err.code}"
-            else:
-                self.error = f"{url} is not response! Maybe it is blocked."
-
-        return None
+                reason = f"Request to {url} failed with status: {err.code}"
+
+            raise EngineError(reason)
 
-    def pretty_error(self, what: str) -> None:
-        prettyPrinter({"engine_url": self.url,
-                       "desc_link": "https://github.com/imDMG/qBt_SE",
-                       "name": f"[{unquote(what)}][Error]: {self.error}",
-                       "link": self.url + "error",
-                       "size": "1 TB",  # lol
-                       "seeds": 100,
-                       "leech": 100})
-
-        self.error = None
+    def pretty_error(self, what: str, error: str) -> None:
+        prettyPrinter({
+            "engine_url": self.url,
+            "desc_link": "https://github.com/imDMG/qBt_SE",
+            "name": f"[{unquote(what)}][Error]: {error}",
+            "link": self.url + "error",
+            "size": "1 TB",  # lol
+            "seeds": 100,
+            "leech": 100
+        })
 
 # pep8
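
The paging math used by _search() is easy to miss: -(-total // PAGES) is ceiling division, and rng() yields only the extra page numbers because page 0 was already fetched by the first request. A small sketch under those assumptions, with fetch_page standing in for Rutor.searching:

from concurrent.futures import ThreadPoolExecutor

PAGES = 100  # rutor shows up to 100 results per page


def rng(t: int) -> range:
    # pages 1 .. ceil(t / PAGES) - 1; page 0 was the first request
    return range(1, -(-t // PAGES))


def fetch_page(page: int) -> None:
    # stand-in for Rutor.searching on the page-specific query
    print(f"requesting page {page}")


total = 342  # e.g. the count reported by the first response
if total > PAGES:
    pages = list(rng(total))  # [1, 2, 3] for 342 results
    with ThreadPoolExecutor(len(pages)) as executor:
        executor.map(fetch_page, pages, timeout=30)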

engines/rutracker.py (248 lines changed)

@@ -1,4 +1,4 @@
-# VERSION: 1.7
+# VERSION: 1.8
 # AUTHORS: imDMG [imdmgg@gmail.com]
 
 # rutracker.org search engine plugin for qBittorrent
@@ -12,10 +12,10 @@ import time
 from concurrent.futures import ThreadPoolExecutor
 from dataclasses import dataclass, field
 from html import unescape
-from http.cookiejar import Cookie, MozillaCookieJar
+from http.cookiejar import MozillaCookieJar
 from pathlib import Path
 from tempfile import NamedTemporaryFile
-from typing import Optional
+from typing import Callable
 from urllib.error import URLError, HTTPError
 from urllib.parse import urlencode, unquote
 from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
@@ -29,24 +29,19 @@ except ImportError:
 FILE = Path(__file__)
 BASEDIR = FILE.parent.absolute()
-FILENAME = FILE.name[:-3]
-FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in [".json", ".cookie"]]
-PAGES = 50
-
-
-def rng(t: int) -> range:
-    return range(PAGES, -(-t // PAGES) * PAGES, PAGES)
-
+FILENAME = FILE.stem
+FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in (".json", ".cookie")]
 
 RE_TORRENTS = re.compile(
     r'<a\sdata-topic_id="(\d+?)".+?">(.+?)</a.+?tor-size"\sdata-ts_text="(\d+?)'
-    r'">.+?data-ts_text="([-0-9]+?)">.+?Личи">(\d+?)</.+?ata-ts_text="(\d+?)">',
+    r'">.+?data-ts_text="([-\d]+?)">.+?Личи">(\d+?)</.+?data-ts_text="(\d+?)">',
     re.S
 )
 RE_RESULTS = re.compile(r"Результатов\sпоиска:\s(\d{1,3})\s<span", re.S)
 PATTERNS = ("%stracker.php?nm=%s&c=%s", "%s&start=%s")
+PAGES = 50
+
 # base64 encoded image
 ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAAAABMLAAATCw"
         "AAAAAAAAAAAAAAAAAAAAAAAAAAAABs3wUAY8wFBGPMBQN2sw8A9kA6AOdOOl/nTjo/5046"
@@ -81,12 +76,19 @@ logging.basicConfig(
 logger = logging.getLogger(__name__)
 
 
+def rng(t: int) -> range:
+    return range(PAGES, -(-t // PAGES) * PAGES, PAGES)
+
+
+class EngineError(Exception):
+    ...
+
+
 @dataclass
 class Config:
     username: str = "USERNAME"
     password: str = "PASSWORD"
     torrent_date: bool = True
-    # magnet: bool = False
     proxy: bool = False
     # dynamic_proxy: bool = True
     proxies: dict = field(default_factory=lambda: {"http": "", "https": ""})
@@ -103,7 +105,7 @@ class Config:
         (BASEDIR / f"{FILENAME}.ico").write_bytes(base64.b64decode(ICON))
 
     def to_str(self) -> str:
-        return json.dumps(self.to_dict(), indent=4, sort_keys=False)
+        return json.dumps(self.to_dict(), indent=4)
 
     def to_dict(self) -> dict:
         return {self._to_camel(k): v for k, v in self.__dict__.items()}
@@ -139,86 +141,20 @@ class Rutracker:
     url_login = url + "login.php"
     supported_categories = {"all": "-1"}
 
-    # error message
-    error: Optional[str] = None
     # cookies
     mcj = MozillaCookieJar()
     # establish connection
     session = build_opener(HTTPCookieProcessor(mcj))
 
-    def __init__(self):
-        # add proxy handler if needed
-        if config.proxy:
-            if any(config.proxies.values()):
-                self.session.add_handler(ProxyHandler(config.proxies))
-                logger.debug("Proxy is set!")
-            else:
-                self.error = "Proxy enabled, but not set!"
-
-        # change user-agent
-        self.session.addheaders = [("User-Agent", config.ua)]
-
-        # load local cookies
-        try:
-            self.mcj.load(FILE_C, ignore_discard=True)
-            if "bb_session" in [cookie.name for cookie in self.mcj]:
-                # if cookie.expires < int(time.time())
-                logger.info("Local cookies is loaded")
-            else:
-                logger.info("Local cookies expired or bad")
-                logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
-                self.mcj.clear()
-                self.login()
-        except FileNotFoundError:
-            self.login()
-
     def search(self, what: str, cat: str = "all") -> None:
-        if self.error:
-            self.pretty_error(what)
-            return None
-        query = PATTERNS[0] % (self.url, what.replace(" ", "+"),
-                               self.supported_categories[cat])
-        # make first request (maybe it enough)
-        t0, total = time.time(), self.searching(query, True)
-        if self.error:
-            self.pretty_error(what)
-            return None
-        # do async requests
-        if total > PAGES:
-            qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
-            with ThreadPoolExecutor(len(qrs)) as executor:
-                executor.map(self.searching, qrs, timeout=30)
-
-        logger.debug(f"--- {time.time() - t0} seconds ---")
-        logger.info(f"Found torrents: {total}")
+        self._catch_errors(self._search, what, cat)
 
     def download_torrent(self, url: str) -> None:
-        # Download url
-        response = self._request(url)
-        if self.error:
-            self.pretty_error(url)
-            return None
-        # Create a torrent file
-        with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
-            fd.write(response)
-        # return file path
-        logger.debug(fd.name + " " + url)
-        print(fd.name + " " + url)
+        self._catch_errors(self._download_torrent, url)
 
     def login(self) -> None:
-        if self.error:
-            return None
         self.mcj.clear()
-        # if we wanna use https we mast add bb_ssl=1 to cookie
-        self.mcj.set_cookie(Cookie(0, "bb_ssl", "1", None, False,
-                                   ".rutracker.org", True, True, "/forum/",
-                                   True, True, None, False, None, None, {}))
         form_data = {"login_username": config.username,
                      "login_password": config.password,
                      "login": "Вход"}
@@ -227,42 +163,31 @@ class Rutracker:
         data_encoded = urlencode(form_data, encoding="cp1251").encode()
         logger.debug(f"Login. Data after: {data_encoded}")
 
         self._request(self.url_login, data_encoded)
-        if self.error:
-            return None
 
         logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
-        if "bb_session" in [cookie.name for cookie in self.mcj]:
-            self.mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
-            logger.info("We successfully authorized")
-        else:
-            self.error = "We not authorized, please check your credentials!"
-            logger.warning(self.error)
+        if "bb_session" not in [cookie.name for cookie in self.mcj]:
+            raise EngineError(
+                "We not authorized, please check your credentials!"
+            )
+        self.mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
+        logger.info("We successfully authorized")
 
-    def searching(self, query: str, first: bool = False) -> Optional[int]:
-        response = self._request(query)
-        if self.error:
-            return None
-        page, torrents_found = response.decode("cp1251"), -1
+    def searching(self, query: str, first: bool = False) -> int:
+        page, torrents_found = self._request(query).decode("cp1251"), -1
         if first:
-            # check login status
             if "log-out-icon" not in page:
                 if "login-form-full" not in page:
-                    self.error = "Unexpected page content"
-                    return None
+                    raise EngineError("Unexpected page content")
                 logger.debug("Looks like we lost session id, lets login")
                 self.login()
-                if self.error:
-                    return None
                 # retry request because guests cant search
-                response = self._request(query)
-                if self.error:
-                    return None
-                page = response.decode("cp1251")
+                page = self._request(query).decode("cp1251")
             # firstly we check if there is a result
-            result = RE_RESULTS.search(page)
-            if not result:
-                self.error = "Unexpected page content"
-                return None
-            torrents_found = int(result[1])
-            if not torrents_found:
+            try:
+                torrents_found = int(RE_RESULTS.search(page)[1])
+            except TypeError:
+                raise EngineError("Unexpected page content")
+            if torrents_found <= 0:
                 return 0
         self.draw(page)
@@ -283,40 +208,97 @@ class Rutracker:
                 "leech": tor[4]
             })
 
+    def _catch_errors(self, handler: Callable, *args: str):
+        try:
+            self._init()
+            handler(*args)
+        except EngineError as ex:
+            self.pretty_error(args[0], str(ex))
+        except Exception as ex:
+            self.pretty_error(args[0], "Unexpected error, please check logs")
+            logger.exception(ex)
+
+    def _init(self) -> None:
+        # add proxy handler if needed
+        if config.proxy:
+            if not any(config.proxies.values()):
+                raise EngineError("Proxy enabled, but not set!")
+            self.session.add_handler(ProxyHandler(config.proxies))
+            logger.debug("Proxy is set!")
+
+        # change user-agent
+        self.session.addheaders = [("User-Agent", config.ua)]
+
+        # load local cookies
+        try:
+            self.mcj.load(FILE_C, ignore_discard=True)
+            if "bb_session" in [cookie.name for cookie in self.mcj]:
+                # if cookie.expires < int(time.time())
+                return logger.info("Local cookies is loaded")
+            logger.info("Local cookies expired or bad, try to login")
+            logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
+        except FileNotFoundError:
+            logger.info("Local cookies not exists, try to login")
+        self.login()
+
+    def _search(self, what: str, cat: str = "all") -> None:
+        query = PATTERNS[0] % (self.url, what.replace(" ", "+"),
+                               self.supported_categories[cat])
+        # make first request (maybe it enough)
+        t0, total = time.time(), self.searching(query, True)
+        # do async requests
+        if total > PAGES:
+            qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
+            with ThreadPoolExecutor(len(qrs)) as executor:
+                executor.map(self.searching, qrs, timeout=30)
+
+        logger.debug(f"--- {time.time() - t0} seconds ---")
+        logger.info(f"Found torrents: {total}")
+
+    def _download_torrent(self, url: str) -> None:
+        response = self._request(url)
+        # Create a torrent file
+        with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
+            fd.write(response)
+        # return file path
+        logger.debug(fd.name + " " + url)
+        print(fd.name + " " + url)
+
     def _request(
-            self, url: str, data: Optional[bytes] = None, repeated: bool = False
-    ) -> Optional[bytes]:
+            self, url: str, data: bytes = None, repeated: bool = False
+    ) -> bytes:
         try:
             with self.session.open(url, data, 5) as r:
                 # checking that tracker isn't blocked
                 if r.geturl().startswith((self.url, self.url_dl)):
                     return r.read()
-            self.error = f"{url} is blocked. Try another proxy."
+            raise EngineError(f"{url} is blocked. Try another proxy.")
         except (URLError, HTTPError) as err:
-            logger.error(err.reason)
             error = str(err.reason)
+            reason = f"{url} is not response! Maybe it is blocked."
             if "timed out" in error and not repeated:
-                logger.debug("Repeating request...")
+                logger.debug("Request timed out. Repeating...")
                 return self._request(url, data, True)
             if "no host given" in error:
-                self.error = "Proxy is bad, try another!"
+                reason = "Proxy is bad, try another!"
             elif hasattr(err, "code"):
-                self.error = f"Request to {url} failed with status: {err.code}"
-            else:
-                self.error = f"{url} is not response! Maybe it is blocked."
-
-        return None
+                reason = f"Request to {url} failed with status: {err.code}"
+
+            raise EngineError(reason)
 
-    def pretty_error(self, what: str) -> None:
-        prettyPrinter({"engine_url": self.url,
-                       "desc_link": "https://github.com/imDMG/qBt_SE",
-                       "name": f"[{unquote(what)}][Error]: {self.error}",
-                       "link": self.url + "error",
-                       "size": "1 TB",  # lol
-                       "seeds": 100,
-                       "leech": 100})
-
-        self.error = None
+    def pretty_error(self, what: str, error: str) -> None:
+        prettyPrinter({
+            "engine_url": self.url,
+            "desc_link": "https://github.com/imDMG/qBt_SE",
+            "name": f"[{unquote(what)}][Error]: {error}",
+            "link": self.url + "error",
+            "size": "1 TB",  # lol
+            "seeds": 100,
+            "leech": 100
+        })
 
 # pep8
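
The _init()/login() split works the same way in all three engines: reuse the saved cookie jar while it still holds the session cookie, otherwise fall through to a fresh login. A stripped-down sketch of that check; the cookie-file path, the "bb_session" name taken from rutracker and the print() placeholders are for illustration only.

from http.cookiejar import MozillaCookieJar
from pathlib import Path

FILE_C = Path(__file__).with_suffix(".cookie")  # e.g. rutracker.cookie
mcj = MozillaCookieJar()


def login() -> None:
    # the real plugin posts credentials here and then calls mcj.save(...)
    print("logging in and saving fresh cookies")


def init() -> None:
    # load local cookies, falling back to login() when they are unusable
    try:
        mcj.load(FILE_C, ignore_discard=True)
        if "bb_session" in [cookie.name for cookie in mcj]:
            return print("Local cookies is loaded")
        print("Local cookies expired or bad, try to login")
    except FileNotFoundError:
        print("Local cookies not exists, try to login")
    login()


init()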

settings_gui.py (4 lines changed)

@@ -22,10 +22,10 @@ class EngineSettingsGUI:
         self.username = tk.StringVar(value=self.config.get("username", ""))
         self.password = tk.StringVar(value=self.config.get("password", ""))
         self.proxy_http = tk.StringVar(
-            value=self.config.get("proxies").get("http", "")
+            value=self.config.get("proxies", dict()).get("http", "")
         )
         self.proxy_https = tk.StringVar(
-            value=self.config.get("proxies").get("https", "")
+            value=self.config.get("proxies", dict()).get("https", "")
         )
         self.date = tk.BooleanVar(value=self.config.get("torrentDate", True))
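
For context on the settings_gui.py change: when the saved JSON has no "proxies" key, config.get("proxies") returns None and the chained .get("http", "") raises AttributeError; passing dict() as the default keeps the chain safe. A two-line illustration with a plain dict standing in for the loaded config:

config = {"username": "USERNAME"}  # config file saved without a "proxies" key

# old form: config.get("proxies").get("http", "") -> AttributeError on None
proxy_http = config.get("proxies", dict()).get("http", "")
proxy_https = config.get("proxies", dict()).get("https", "")
print(repr(proxy_http), repr(proxy_https))  # '' ''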
