
Better error handling

branch: master
author: imDMG, 1 year ago
commit: 4b43b63ef3
1. README.md (6 changed lines)
2. engines/kinozal.py (238 changed lines)
3. engines/rutor.py (178 changed lines)
4. engines/rutracker.py (248 changed lines)
5. settings_gui.py (4 changed lines)

README.md (6 changed lines)

@@ -1,13 +1,13 @@
[![Python 3.7+](https://img.shields.io/badge/python-%3E%3D%20v3.7-blue)](https://www.python.org/downloads/release/python-370/)
# qBittorrent plugins
## Rutracker.org ![v1.7](https://img.shields.io/badge/v1.7-blue)
## Rutracker.org ![v1.8](https://img.shields.io/badge/v1.8-blue)
The biggest Russian torrent tracker.
## Rutor.org ![v1.6](https://img.shields.io/badge/v1.6-blue)
## Rutor.org ![v1.7](https://img.shields.io/badge/v1.7-blue)
A popular free Russian torrent tracker.
## Kinozal.tv ![v2.10](https://img.shields.io/badge/v2.10-blue)
## Kinozal.tv ![v2.11](https://img.shields.io/badge/v2.11-blue)
A Russian torrent tracker focused mostly on movies, but it has other categories as well.
The site restricts the number of torrent files you can download (10 by default or so), so I added the ability to open the magnet link instead of the file.

engines/kinozal.py (238 changed lines)

@@ -1,4 +1,4 @@
# VERSION: 2.10
# VERSION: 2.11
# AUTHORS: imDMG [imdmgg@gmail.com]
# Kinozal.tv search engine plugin for qBittorrent
@@ -17,7 +17,7 @@ from html import unescape
from http.cookiejar import MozillaCookieJar
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Union, Optional
from typing import Callable
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode, unquote
from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
@@ -43,8 +43,6 @@ BASEDIR = FILE.parent.absolute()
FILENAME = FILE.stem
FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in (".json", ".cookie")]
PAGES = 50
RE_TORRENTS = re.compile(
r'nam"><a\s+?href="/(.+?)"\s+?class="r\d">(.+?)</a>.+?s\'>.+?s\'>(.+?)<.+?'
r'sl_s\'>(\d+?)<.+?sl_p\'>(\d+?)<.+?s\'>(.+?)</td>', re.S
@@ -52,6 +50,8 @@ RE_TORRENTS = re.compile(
RE_RESULTS = re.compile(r"</span>Найдено\s+?(\d+?)\s+?раздач", re.S)
PATTERNS = ("%sbrowse.php?s=%s&c=%s", "%s&page=%s")
PAGES = 50
# base64 encoded image
ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAQAQAAAAAAAAAAA"
"AAAAAAAAAAAACARztMgEc7/4BHO/+ARztMAAAAAIBHO0yhd2n/gEc7/6F3af+ARztMAAAA"
@@ -81,6 +81,10 @@ def rng(t: int) -> range:
return range(1, -(-t // PAGES))
class EngineError(Exception):
...
@dataclass
class Config:
username: str = "USERNAME"
@@ -145,88 +149,19 @@ class Kinozal:
"anime": "20",
"software": "32"}
# error message
error: Optional[str] = None
# cookies
mcj = MozillaCookieJar()
# establish connection
session = build_opener(HTTPCookieProcessor(mcj))
def __init__(self):
# add proxy handler if needed
if config.proxy:
if any(config.proxies.values()):
self.session.add_handler(ProxyHandler(config.proxies))
logger.debug("Proxy is set!")
else:
self.error = "Proxy enabled, but not set!"
# change user-agent
self.session.addheaders = [("User-Agent", config.ua)]
# load local cookies
try:
self.mcj.load(FILE_C, ignore_discard=True)
if "uid" in [cookie.name for cookie in self.mcj]:
# if cookie.expires < int(time.time())
logger.info("Local cookies is loaded")
else:
logger.info("Local cookies expired or bad")
logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
self.mcj.clear()
self.login()
except FileNotFoundError:
self.login()
def search(self, what: str, cat: str = "all") -> None:
if self.error:
self.pretty_error(what)
return None
query = PATTERNS[0] % (self.url, what.replace(" ", "+"),
self.supported_categories[cat])
# make first request (maybe it enough)
t0, total = time.time(), self.searching(query, True)
if self.error:
self.pretty_error(what)
return None
# do async requests
if total > PAGES:
qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
with ThreadPoolExecutor(len(qrs)) as executor:
executor.map(self.searching, qrs, timeout=30)
logger.debug(f"--- {time.time() - t0} seconds ---")
logger.info(f"Found torrents: {total}")
self._catch_errors(self._search, what, cat)
def download_torrent(self, url: str) -> None:
# choose download method
if config.magnet:
url = "%sget_srv_details.php?action=2&id=%s" % (self.url,
url.split("=")[1])
response = self._request(url)
if self.error:
self.pretty_error(url)
return None
if config.magnet:
if response.startswith(b"\x1f\x8b\x08"):
response = gzip.decompress(response)
path = "magnet:?xt=urn:btih:" + response.decode()[18:58]
else:
# Create a torrent file
with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
fd.write(response)
path = fd.name
# return magnet link / file path
logger.debug(path + " " + url)
print(path + " " + url)
self._catch_errors(self._download_torrent, url)
def login(self) -> None:
if self.error:
return None
self.mcj.clear()
form_data = {"username": config.username, "password": config.password}
logger.debug(f"Login. Data before: {form_data}")
@@ -235,20 +170,16 @@ class Kinozal:
logger.debug(f"Login. Data after: {data_encoded}")
self._request(self.url_login, data_encoded)
if self.error:
return None
logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
if "uid" in [cookie.name for cookie in self.mcj]:
self.mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
logger.info("We successfully authorized")
else:
self.error = "We not authorized, please check your credentials!"
logger.warning(self.error)
def searching(self, query: str, first: bool = False) -> Union[None, int]:
if "uid" not in [cookie.name for cookie in self.mcj]:
raise EngineError(
"We not authorized, please check your credentials!"
)
self.mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
logger.info("We successfully authorized")
def searching(self, query: str, first: bool = False) -> int:
response = self._request(query)
if self.error:
return None
if response.startswith(b"\x1f\x8b\x08"):
response = gzip.decompress(response)
page, torrents_found = response.decode("cp1251"), -1
@@ -256,17 +187,13 @@ class Kinozal:
# check login status
if "Гость! ( Зарегистрируйтесь )" in page:
logger.debug("Looks like we lost session id, lets login")
self.mcj.clear()
self.login()
if self.error:
return None
# firstly we check if there is a result
result = RE_RESULTS.search(page)
if not result:
self.error = "Unexpected page content"
return None
torrents_found = int(result[1])
if not torrents_found:
try:
torrents_found = int(RE_RESULTS.search(page)[1])
except TypeError:
raise EngineError("Unexpected page content")
if torrents_found <= 0:
return 0
self.draw(page)
@@ -276,6 +203,8 @@ class Kinozal:
_part = partial(time.strftime, "%y.%m.%d")
# yeah this is yesterday
yesterday = _part(time.localtime(time.time() - 86400))
# replace size units
table = {"Т": "T", "Г": "G", "М": "M", "К": "K", "Б": "B"}
for tor in RE_TORRENTS.findall(html):
torrent_date = ""
if config.torrent_date:
@@ -288,9 +217,6 @@ class Kinozal:
torrent_date = _part(time.strptime(ct, "%d.%m.%Y"))
torrent_date = f"[{torrent_date}] "
# replace size units
table = {"Т": "T", "Г": "G", "М": "M", "К": "K", "Б": "B"}
prettyPrinter({
"engine_url": self.url,
"desc_link": self.url + tor[0],
@@ -301,40 +227,108 @@ class Kinozal:
"leech": tor[4]
})
def _catch_errors(self, handler: Callable, *args: str):
try:
self._init()
handler(*args)
except EngineError as ex:
self.pretty_error(args[0], str(ex))
except Exception as ex:
self.pretty_error(args[0], "Unexpected error, please check logs")
logger.exception(ex)
def _init(self) -> None:
# add proxy handler if needed
if config.proxy:
if not any(config.proxies.values()):
raise EngineError("Proxy enabled, but not set!")
self.session.add_handler(ProxyHandler(config.proxies))
logger.debug("Proxy is set!")
# change user-agent
self.session.addheaders = [("User-Agent", config.ua)]
# load local cookies
try:
self.mcj.load(FILE_C, ignore_discard=True)
if "uid" in [cookie.name for cookie in self.mcj]:
# if cookie.expires < int(time.time())
return logger.info("Local cookies is loaded")
logger.info("Local cookies expired or bad, try to login")
logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
except FileNotFoundError:
logger.info("Local cookies not exists, try to login")
self.login()
def _search(self, what: str, cat: str = "all") -> None:
query = PATTERNS[0] % (self.url, what.replace(" ", "+"),
self.supported_categories[cat])
# make first request (maybe it enough)
t0, total = time.time(), self.searching(query, True)
# do async requests
if total > PAGES:
qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
with ThreadPoolExecutor(len(qrs)) as executor:
executor.map(self.searching, qrs, timeout=30)
logger.debug(f"--- {time.time() - t0} seconds ---")
logger.info(f"Found torrents: {total}")
def _download_torrent(self, url: str) -> None:
# choose download method
if config.magnet:
url = "%sget_srv_details.php?action=2&id=%s" % (self.url,
url.split("=")[1])
response = self._request(url)
if config.magnet:
if response.startswith(b"\x1f\x8b\x08"):
response = gzip.decompress(response)
path = "magnet:?xt=urn:btih:" + response.decode()[18:58]
else:
# Create a torrent file
with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
fd.write(response)
path = fd.name
# return magnet link / file path
logger.debug(path + " " + url)
print(path + " " + url)
def _request(
self, url: str, data: Optional[bytes] = None, repeated: bool = False
) -> Union[bytes, None]:
self, url: str, data: bytes = None, repeated: bool = False
) -> bytes:
try:
with self.session.open(url, data, 5) as r:
# checking that tracker isn't blocked
if r.geturl().startswith((self.url, self.url_dl)):
return r.read()
self.error = f"{url} is blocked. Try another proxy."
raise EngineError(f"{url} is blocked. Try another proxy.")
except (URLError, HTTPError) as err:
logger.error(err.reason)
error = str(err.reason)
reason = f"{url} is not response! Maybe it is blocked."
if "timed out" in error and not repeated:
logger.debug("Repeating request...")
logger.debug("Request timed out. Repeating...")
return self._request(url, data, True)
if "no host given" in error:
self.error = "Proxy is bad, try another!"
reason = "Proxy is bad, try another!"
elif hasattr(err, "code"):
self.error = f"Request to {url} failed with status: {err.code}"
else:
self.error = f"{url} is not response! Maybe it is blocked."
return None
def pretty_error(self, what: str) -> None:
prettyPrinter({"engine_url": self.url,
"desc_link": "https://github.com/imDMG/qBt_SE",
"name": f"[{unquote(what)}][Error]: {self.error}",
"link": self.url + "error",
"size": "1 TB", # lol
"seeds": 100,
"leech": 100})
self.error = None
reason = f"Request to {url} failed with status: {err.code}"
raise EngineError(reason)
def pretty_error(self, what: str, error: str) -> None:
prettyPrinter({
"engine_url": self.url,
"desc_link": "https://github.com/imDMG/qBt_SE",
"name": f"[{unquote(what)}][Error]: {error}",
"link": self.url + "error",
"size": "1 TB", # lol
"seeds": 100,
"leech": 100
})
# pep8
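
The pattern this commit applies to all three engines is the same: the public search() and download_torrent() entry points only dispatch through _catch_errors(), the lower layers raise EngineError instead of setting and re-checking self.error, and pretty_error() turns the message into a fake result row for qBittorrent. A minimal runnable sketch of that control flow (DummyEngine, its deliberately failing _search() and the plain print() in pretty_error() are illustrative stand-ins, not code from the plugins):

```python
import logging

logger = logging.getLogger(__name__)


class EngineError(Exception):
    ...


class DummyEngine:
    """Illustrative stand-in for the Kinozal/Rutor/Rutracker classes."""

    def search(self, what: str, cat: str = "all") -> None:
        # public entry point: no error flags, just dispatch through the wrapper
        self._catch_errors(self._search, what, cat)

    def _catch_errors(self, handler, *args: str) -> None:
        try:
            self._init()
            handler(*args)
        except EngineError as ex:
            self.pretty_error(args[0], str(ex))
        except Exception as ex:
            self.pretty_error(args[0], "Unexpected error, please check logs")
            logger.exception(ex)

    def _init(self) -> None:
        ...  # proxy and cookie setup would go here and may raise EngineError

    def _search(self, what: str, cat: str) -> None:
        # any layer below search() can now simply raise
        raise EngineError("Proxy enabled, but not set!")

    def pretty_error(self, what: str, error: str) -> None:
        # the real engines emit a fake result row via prettyPrinter instead
        print(f"[{what}][Error]: {error}")


DummyEngine().search("ubuntu")  # -> [ubuntu][Error]: Proxy enabled, but not set!
```

The practical win is that every scattered "if self.error: return None" check disappears, and unexpected exceptions still end up in the log instead of silently breaking the search.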

engines/rutor.py (178 changed lines)

@@ -1,4 +1,4 @@
# VERSION: 1.6
# VERSION: 1.7
# AUTHORS: imDMG [imdmgg@gmail.com]
# Rutor.org search engine plugin for qBittorrent
@@ -14,7 +14,7 @@ from dataclasses import dataclass, field
from html import unescape
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Optional, Union
from typing import Callable
from urllib.error import URLError, HTTPError
from urllib.parse import unquote
from urllib.request import build_opener, ProxyHandler
@@ -31,12 +31,6 @@ BASEDIR = FILE.parent.absolute()
FILENAME = FILE.stem
FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in (".json", ".cookie")]
PAGES = 100
def rng(t: int) -> range:
return range(1, -(-t // PAGES))
RE_TORRENTS = re.compile(
r'(?:gai|tum)"><td>(.+?)</td.+?href="(magnet:.+?)".+?href="/'
@@ -46,6 +40,8 @@ RE_TORRENTS = re.compile(
RE_RESULTS = re.compile(r"</b>\sРезультатов\sпоиска\s(\d{1,4})\s", re.S)
PATTERNS = ("%ssearch/%i/%i/000/0/%s",)
PAGES = 100
# base64 encoded image
ICON = ("AAABAAEAEBAAAAEAGABoAwAAFgAAACgAAAAQAAAAIAAAAAEAGAAAAAAAAAAAAAAAAAAAAA"
"AAAAAAAAAAAAAAAc4AAMwHNdcQ4vsN3fYS2fUY3fUe3fMj4fkk4fco4PYo5fgk7f5gp8Zu"
@@ -76,6 +72,14 @@ logging.basicConfig(
logger = logging.getLogger(__name__)
def rng(t: int) -> range:
return range(1, -(-t // PAGES))
class EngineError(Exception):
...
@dataclass
class Config:
# username: str = "USERNAME"
@@ -141,73 +145,24 @@ class Rutor:
"pictures": 3,
"books": 11}
# error message
error: Optional[str] = None
# establish connection
session = build_opener()
def __init__(self):
# add proxy handler if needed
if config.proxy:
if any(config.proxies.values()):
self.session.add_handler(ProxyHandler(config.proxies))
logger.debug("Proxy is set!")
else:
self.error = "Proxy enabled, but not set!"
# change user-agent
self.session.addheaders = [("User-Agent", config.ua)]
def search(self, what: str, cat: str = "all") -> None:
if self.error:
self.pretty_error(what)
return None
query = PATTERNS[0] % (self.url, 0, self.supported_categories[cat],
what.replace(" ", "+"))
# make first request (maybe it enough)
t0, total = time.time(), self.searching(query, True)
if self.error:
self.pretty_error(what)
return None
# do async requests
if total > PAGES:
query = query.replace("h/0", "h/{}")
qrs = [query.format(x) for x in rng(total)]
with ThreadPoolExecutor(len(qrs)) as executor:
executor.map(self.searching, qrs, timeout=30)
logger.debug(f"--- {time.time() - t0} seconds ---")
logger.info(f"Found torrents: {total}")
self._catch_errors(self._search, what, cat)
def download_torrent(self, url: str) -> None:
# Download url
response = self._request(url)
if self.error:
self.pretty_error(url)
return None
self._catch_errors(self._download_torrent, url)
# Create a torrent file
with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
fd.write(response)
# return file path
logger.debug(fd.name + " " + url)
print(fd.name + " " + url)
def searching(self, query: str, first: bool = False) -> Union[None, int]:
response = self._request(query)
if self.error:
return None
page, torrents_found = response.decode(), -1
def searching(self, query: str, first: bool = False) -> int:
page, torrents_found = self._request(query).decode(), -1
if first:
# firstly we check if there is a result
result = RE_RESULTS.search(page)
if not result:
self.error = "Unexpected page content"
return None
torrents_found = int(result[1])
if not torrents_found:
try:
torrents_found = int(RE_RESULTS.search(page)[1])
except TypeError:
raise EngineError("Unexpected page content")
if torrents_found <= 0:
return 0
self.draw(page)
@@ -235,40 +190,87 @@ class Rutor:
"leech": unescape(tor[7])
})
def _catch_errors(self, handler: Callable, *args: str):
try:
self._init()
handler(*args)
except EngineError as ex:
self.pretty_error(args[0], str(ex))
except Exception as ex:
self.pretty_error(args[0], "Unexpected error, please check logs")
logger.exception(ex)
def _init(self) -> None:
# add proxy handler if needed
if config.proxy:
if not any(config.proxies.values()):
raise EngineError("Proxy enabled, but not set!")
self.session.add_handler(ProxyHandler(config.proxies))
logger.debug("Proxy is set!")
# change user-agent
self.session.addheaders = [("User-Agent", config.ua)]
def _search(self, what: str, cat: str = "all") -> None:
query = PATTERNS[0] % (self.url, 0, self.supported_categories[cat],
what.replace(" ", "+"))
# make first request (maybe it enough)
t0, total = time.time(), self.searching(query, True)
# do async requests
if total > PAGES:
query = query.replace("h/0", "h/{}")
qrs = [query.format(x) for x in rng(total)]
with ThreadPoolExecutor(len(qrs)) as executor:
executor.map(self.searching, qrs, timeout=30)
logger.debug(f"--- {time.time() - t0} seconds ---")
logger.info(f"Found torrents: {total}")
def _download_torrent(self, url: str) -> None:
# Download url
response = self._request(url)
# Create a torrent file
with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
fd.write(response)
# return file path
logger.debug(fd.name + " " + url)
print(fd.name + " " + url)
def _request(
self, url: str, data: Optional[bytes] = None, repeated: bool = False
) -> Union[bytes, None]:
self, url: str, data: bytes = None, repeated: bool = False
) -> bytes:
try:
with self.session.open(url, data, 5) as r:
# checking that tracker isn't blocked
if r.geturl().startswith((self.url, self.url_dl)):
return r.read()
self.error = f"{url} is blocked. Try another proxy."
raise EngineError(f"{url} is blocked. Try another proxy.")
except (URLError, HTTPError) as err:
logger.error(err.reason)
error = str(err.reason)
reason = f"{url} is not response! Maybe it is blocked."
if "timed out" in error and not repeated:
logger.debug("Repeating request...")
logger.debug("Request timed out. Repeating...")
return self._request(url, data, True)
if "no host given" in error:
self.error = "Proxy is bad, try another!"
reason = "Proxy is bad, try another!"
elif hasattr(err, "code"):
self.error = f"Request to {url} failed with status: {err.code}"
else:
self.error = f"{url} is not response! Maybe it is blocked."
return None
def pretty_error(self, what: str) -> None:
prettyPrinter({"engine_url": self.url,
"desc_link": "https://github.com/imDMG/qBt_SE",
"name": f"[{unquote(what)}][Error]: {self.error}",
"link": self.url + "error",
"size": "1 TB", # lol
"seeds": 100,
"leech": 100})
self.error = None
reason = f"Request to {url} failed with status: {err.code}"
raise EngineError(reason)
def pretty_error(self, what: str, error: str) -> None:
prettyPrinter({
"engine_url": self.url,
"desc_link": "https://github.com/imDMG/qBt_SE",
"name": f"[{unquote(what)}][Error]: {error}",
"link": self.url + "error",
"size": "1 TB", # lol
"seeds": 100,
"leech": 100
})
# pep8
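
Both rutor.py and kinozal.py size their extra page requests with the -(-t // PAGES) ceiling-division trick before fanning out over a ThreadPoolExecutor, while rutracker.py walks result offsets instead of page numbers. A small worked check of that arithmetic (the PAGES values come from the diff; the result counts are made-up examples):

```python
PAGES = 100  # rutor.py; kinozal.py and rutracker.py use 50


def rng(t: int) -> range:
    # extra page numbers after the first request (rutor/kinozal style);
    # -(-t // PAGES) is ceil(t / PAGES) without importing math
    return range(1, -(-t // PAGES))


def rng_offsets(t: int, pages: int = 50) -> range:
    # rutracker style: result offsets for the &start= query parameter
    return range(pages, -(-t // pages) * pages, pages)


assert -(-250 // PAGES) == 3        # 250 results span 3 pages of 100
assert list(rng(250)) == [1, 2]     # first page already fetched, two more requests
assert list(rng(100)) == []         # exactly one page, no extra requests
assert list(rng_offsets(120)) == [50, 100]  # rutracker asks for &start=50 and &start=100
```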

engines/rutracker.py (248 changed lines)

@@ -1,4 +1,4 @@
# VERSION: 1.7
# VERSION: 1.8
# AUTHORS: imDMG [imdmgg@gmail.com]
# rutracker.org search engine plugin for qBittorrent
@@ -12,10 +12,10 @@ import time
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass, field
from html import unescape
from http.cookiejar import Cookie, MozillaCookieJar
from http.cookiejar import MozillaCookieJar
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Optional
from typing import Callable
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode, unquote
from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
@@ -29,24 +29,19 @@ except ImportError:
FILE = Path(__file__)
BASEDIR = FILE.parent.absolute()
FILENAME = FILE.name[:-3]
FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in [".json", ".cookie"]]
PAGES = 50
def rng(t: int) -> range:
return range(PAGES, -(-t // PAGES) * PAGES, PAGES)
FILENAME = FILE.stem
FILE_J, FILE_C = [BASEDIR / (FILENAME + fl) for fl in (".json", ".cookie")]
RE_TORRENTS = re.compile(
r'<a\sdata-topic_id="(\d+?)".+?">(.+?)</a.+?tor-size"\sdata-ts_text="(\d+?)'
r'">.+?data-ts_text="([-0-9]+?)">.+?Личи">(\d+?)</.+?ata-ts_text="(\d+?)">',
r'">.+?data-ts_text="([-\d]+?)">.+?Личи">(\d+?)</.+?data-ts_text="(\d+?)">',
re.S
)
RE_RESULTS = re.compile(r"Результатов\sпоиска:\s(\d{1,3})\s<span", re.S)
PATTERNS = ("%stracker.php?nm=%s&c=%s", "%s&start=%s")
PAGES = 50
# base64 encoded image
ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAAAABMLAAATCw"
"AAAAAAAAAAAAAAAAAAAAAAAAAAAABs3wUAY8wFBGPMBQN2sw8A9kA6AOdOOl/nTjo/5046"
@@ -81,12 +76,19 @@ logging.basicConfig(
logger = logging.getLogger(__name__)
def rng(t: int) -> range:
return range(PAGES, -(-t // PAGES) * PAGES, PAGES)
class EngineError(Exception):
...
@dataclass
class Config:
username: str = "USERNAME"
password: str = "PASSWORD"
torrent_date: bool = True
# magnet: bool = False
proxy: bool = False
# dynamic_proxy: bool = True
proxies: dict = field(default_factory=lambda: {"http": "", "https": ""})
@@ -103,7 +105,7 @@ class Config:
(BASEDIR / f"{FILENAME}.ico").write_bytes(base64.b64decode(ICON))
def to_str(self) -> str:
return json.dumps(self.to_dict(), indent=4, sort_keys=False)
return json.dumps(self.to_dict(), indent=4)
def to_dict(self) -> dict:
return {self._to_camel(k): v for k, v in self.__dict__.items()}
@@ -139,86 +141,20 @@ class Rutracker:
url_login = url + "login.php"
supported_categories = {"all": "-1"}
# error message
error: Optional[str] = None
# cookies
mcj = MozillaCookieJar()
# establish connection
session = build_opener(HTTPCookieProcessor(mcj))
def __init__(self):
# add proxy handler if needed
if config.proxy:
if any(config.proxies.values()):
self.session.add_handler(ProxyHandler(config.proxies))
logger.debug("Proxy is set!")
else:
self.error = "Proxy enabled, but not set!"
# change user-agent
self.session.addheaders = [("User-Agent", config.ua)]
# load local cookies
try:
self.mcj.load(FILE_C, ignore_discard=True)
if "bb_session" in [cookie.name for cookie in self.mcj]:
# if cookie.expires < int(time.time())
logger.info("Local cookies is loaded")
else:
logger.info("Local cookies expired or bad")
logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
self.mcj.clear()
self.login()
except FileNotFoundError:
self.login()
def search(self, what: str, cat: str = "all") -> None:
if self.error:
self.pretty_error(what)
return None
query = PATTERNS[0] % (self.url, what.replace(" ", "+"),
self.supported_categories[cat])
# make first request (maybe it enough)
t0, total = time.time(), self.searching(query, True)
if self.error:
self.pretty_error(what)
return None
# do async requests
if total > PAGES:
qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
with ThreadPoolExecutor(len(qrs)) as executor:
executor.map(self.searching, qrs, timeout=30)
logger.debug(f"--- {time.time() - t0} seconds ---")
logger.info(f"Found torrents: {total}")
self._catch_errors(self._search, what, cat)
def download_torrent(self, url: str) -> None:
# Download url
response = self._request(url)
if self.error:
self.pretty_error(url)
return None
# Create a torrent file
with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
fd.write(response)
# return file path
logger.debug(fd.name + " " + url)
print(fd.name + " " + url)
self._catch_errors(self._download_torrent, url)
def login(self) -> None:
if self.error:
return None
self.mcj.clear()
# if we wanna use https we mast add bb_ssl=1 to cookie
self.mcj.set_cookie(Cookie(0, "bb_ssl", "1", None, False,
".rutracker.org", True, True, "/forum/",
True, True, None, False, None, None, {}))
form_data = {"login_username": config.username,
"login_password": config.password,
"login": "Вход"}
@@ -227,42 +163,31 @@ class Rutracker:
data_encoded = urlencode(form_data, encoding="cp1251").encode()
logger.debug(f"Login. Data after: {data_encoded}")
self._request(self.url_login, data_encoded)
if self.error:
return None
logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
if "bb_session" in [cookie.name for cookie in self.mcj]:
self.mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
logger.info("We successfully authorized")
else:
self.error = "We not authorized, please check your credentials!"
logger.warning(self.error)
def searching(self, query: str, first: bool = False) -> Optional[int]:
response = self._request(query)
if self.error:
return None
page, torrents_found = response.decode("cp1251"), -1
if "bb_session" not in [cookie.name for cookie in self.mcj]:
raise EngineError(
"We not authorized, please check your credentials!"
)
self.mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
logger.info("We successfully authorized")
def searching(self, query: str, first: bool = False) -> int:
page, torrents_found = self._request(query).decode("cp1251"), -1
if first:
# check login status
if "log-out-icon" not in page:
if "login-form-full" not in page:
self.error = "Unexpected page content"
return None
raise EngineError("Unexpected page content")
logger.debug("Looks like we lost session id, lets login")
self.login()
if self.error:
return None
# retry request because guests cant search
response = self._request(query)
if self.error:
return None
page = response.decode("cp1251")
page = self._request(query).decode("cp1251")
# firstly we check if there is a result
result = RE_RESULTS.search(page)
if not result:
self.error = "Unexpected page content"
return None
torrents_found = int(result[1])
if not torrents_found:
try:
torrents_found = int(RE_RESULTS.search(page)[1])
except TypeError:
raise EngineError("Unexpected page content")
if torrents_found <= 0:
return 0
self.draw(page)
@@ -283,40 +208,97 @@ class Rutracker:
"leech": tor[4]
})
def _catch_errors(self, handler: Callable, *args: str):
try:
self._init()
handler(*args)
except EngineError as ex:
self.pretty_error(args[0], str(ex))
except Exception as ex:
self.pretty_error(args[0], "Unexpected error, please check logs")
logger.exception(ex)
def _init(self) -> None:
# add proxy handler if needed
if config.proxy:
if not any(config.proxies.values()):
raise EngineError("Proxy enabled, but not set!")
self.session.add_handler(ProxyHandler(config.proxies))
logger.debug("Proxy is set!")
# change user-agent
self.session.addheaders = [("User-Agent", config.ua)]
# load local cookies
try:
self.mcj.load(FILE_C, ignore_discard=True)
if "bb_session" in [cookie.name for cookie in self.mcj]:
# if cookie.expires < int(time.time())
return logger.info("Local cookies is loaded")
logger.info("Local cookies expired or bad, try to login")
logger.debug(f"That we have: {[cookie for cookie in self.mcj]}")
except FileNotFoundError:
logger.info("Local cookies not exists, try to login")
self.login()
def _search(self, what: str, cat: str = "all") -> None:
query = PATTERNS[0] % (self.url, what.replace(" ", "+"),
self.supported_categories[cat])
# make first request (maybe it enough)
t0, total = time.time(), self.searching(query, True)
# do async requests
if total > PAGES:
qrs = [PATTERNS[1] % (query, x) for x in rng(total)]
with ThreadPoolExecutor(len(qrs)) as executor:
executor.map(self.searching, qrs, timeout=30)
logger.debug(f"--- {time.time() - t0} seconds ---")
logger.info(f"Found torrents: {total}")
def _download_torrent(self, url: str) -> None:
response = self._request(url)
# Create a torrent file
with NamedTemporaryFile(suffix=".torrent", delete=False) as fd:
fd.write(response)
# return file path
logger.debug(fd.name + " " + url)
print(fd.name + " " + url)
def _request(
self, url: str, data: Optional[bytes] = None, repeated: bool = False
) -> Optional[bytes]:
self, url: str, data: bytes = None, repeated: bool = False
) -> bytes:
try:
with self.session.open(url, data, 5) as r:
# checking that tracker isn't blocked
if r.geturl().startswith((self.url, self.url_dl)):
return r.read()
self.error = f"{url} is blocked. Try another proxy."
raise EngineError(f"{url} is blocked. Try another proxy.")
except (URLError, HTTPError) as err:
logger.error(err.reason)
error = str(err.reason)
reason = f"{url} is not response! Maybe it is blocked."
if "timed out" in error and not repeated:
logger.debug("Repeating request...")
logger.debug("Request timed out. Repeating...")
return self._request(url, data, True)
if "no host given" in error:
self.error = "Proxy is bad, try another!"
reason = "Proxy is bad, try another!"
elif hasattr(err, "code"):
self.error = f"Request to {url} failed with status: {err.code}"
else:
self.error = f"{url} is not response! Maybe it is blocked."
return None
def pretty_error(self, what: str) -> None:
prettyPrinter({"engine_url": self.url,
"desc_link": "https://github.com/imDMG/qBt_SE",
"name": f"[{unquote(what)}][Error]: {self.error}",
"link": self.url + "error",
"size": "1 TB", # lol
"seeds": 100,
"leech": 100})
self.error = None
reason = f"Request to {url} failed with status: {err.code}"
raise EngineError(reason)
def pretty_error(self, what: str, error: str) -> None:
prettyPrinter({
"engine_url": self.url,
"desc_link": "https://github.com/imDMG/qBt_SE",
"name": f"[{unquote(what)}][Error]: {error}",
"link": self.url + "error",
"size": "1 TB", # lol
"seeds": 100,
"leech": 100
})
# pep8
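
One detail worth spelling out is the new try/except TypeError around the results regex in the three searching() implementations: re.search() returns None when the page has no result counter, and subscripting that None is what raises the TypeError that gets converted into EngineError("Unexpected page content"). A tiny illustration using the rutracker pattern (the sample pages are invented):

```python
import re

RE_RESULTS = re.compile(r"Результатов\sпоиска:\s(\d{1,3})\s<span", re.S)

page_ok = "Результатов поиска: 42 <span>"       # fake result page
page_bad = "<html>unexpected content</html>"    # e.g. a captcha or error page

assert int(RE_RESULTS.search(page_ok)[1]) == 42

try:
    int(RE_RESULTS.search(page_bad)[1])         # search() returned None
except TypeError:
    # in the plugins this becomes: raise EngineError("Unexpected page content")
    print("no result counter found")
```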

settings_gui.py (4 changed lines)

@@ -22,10 +22,10 @@ class EngineSettingsGUI:
self.username = tk.StringVar(value=self.config.get("username", ""))
self.password = tk.StringVar(value=self.config.get("password", ""))
self.proxy_http = tk.StringVar(
value=self.config.get("proxies").get("http", "")
value=self.config.get("proxies", dict()).get("http", "")
)
self.proxy_https = tk.StringVar(
value=self.config.get("proxies").get("https", "")
value=self.config.get("proxies", dict()).get("https", "")
)
self.date = tk.BooleanVar(value=self.config.get("torrentDate", True))
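
The settings_gui.py change is a small defensive fix: an older or hand-edited engine config file may not contain a "proxies" key yet, in which case dict.get("proxies") returns None and the chained .get("http", "") call crashes the settings window. The new default keeps the chain safe; a quick illustration (the sample config is hypothetical):

```python
config = {"username": "user", "password": "pass"}   # no "proxies" key saved yet

# old behaviour: config.get("proxies") is None, so .get("http", "") raises
# AttributeError: 'NoneType' object has no attribute 'get'

# new behaviour: fall back to an empty dict, so the chained .get() is always safe
http_proxy = config.get("proxies", dict()).get("http", "")
assert http_proxy == ""
```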
