
Major code refactoring (including bringing the plugins to a common structure). New error handler. Added rutracker.py plugin.

master
imDMG committed 5 years ago
parent commit b4e9c16afa
  1. 17    .gitignore
  2. 17    gui_kinozal.py
  3. BIN   kinozal.ico
  4. BIN   kinozal.png
  5. 395   kinozal.py
  6. BIN   nnmclub.ico
  7. BIN   nnmclub.png
  8. 362   nnmclub.py
  9. 280   rutracker.py
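
The commit message mentions a common structure and a new error handler shared by the plugins. A minimal sketch of that pattern, condensed from the diffs below; the prettyPrinter stub and the example class/URL are placeholders, not code from the repository:

import json


def prettyPrinter(dictionary):
    # stand-in for qBittorrent's novaprinter.prettyPrinter
    print(json.dumps(dictionary, ensure_ascii=False))


config = {"username": "USERNAME", "password": "PASSWORD", "proxy": False}


class example:
    url = 'https://example.org/forum/'

    def __init__(self):
        # error message; None means everything is fine so far
        self.error = None
        if config['proxy']:
            self.error = "Proxy enabled, but not set!"

    def search(self, what, cat='all'):
        # every public entry point bails out through pretty_error()
        if self.error:
            self.pretty_error(what)
            return
        # ... the real plugins request the tracker and call self.draw(page) here

    def pretty_error(self, what):
        # report the failure as a fake search result so it is visible in the UI
        prettyPrinter({"engine_url": self.url,
                       "desc_link": "https://github.com/imDMG/qBt_SE",
                       "name": f"[{what}][Error]: {self.error}",
                       "link": self.url + "error",
                       "size": "1 TB",
                       "seeds": 100,
                       "leech": 100})
        self.error = None


if __name__ == "__main__":
    example().search('doctor')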

17  .gitignore (vendored)

@@ -0,0 +1,17 @@
/tests/
/venv/
/kinozal.cookie
/kinozal.cookie.bak
/kinozal.ico
/kinozal.json
/kinozal.json.bak
/nnmclub.cookie
/nnmclub.cookie.bak
/nnmclub.ico
/nnmclub.json
/nnmclub.json.bak
/rutracker.cookie
/rutracker.cookie.bak
/rutracker.ico
/rutracker.json
/rutracker.json.bak
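
The ignored .json and .ico files are no longer tracked because the plugins now create them when missing: the config on first run, and the icon decoded from a base64 string embedded in each plugin's source. A minimal sketch of that step; ICON here is a truncated sample and plugin.ico a placeholder name:

import base64

ICON = "AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQ"  # truncated sample, not a real icon

with open("plugin.ico", "wb") as f:
    f.write(base64.b64decode(ICON))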

17  gui_kinozal.py

@@ -47,6 +47,23 @@ class kinozal(object):
        "ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0"
    }
icon = 'AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAQAQAAAAAAAAAAAAAAAAAAAAAAACARztMgEc7/4BHO' \
'/+ARztMAAAAAIBHO0yhd2n/gEc7/6F3af+ARztMAAAAAIBHO0yARzv/gEc7/4BHO0wAAAAAgEc7/7iYiv/O4+r/pH5x/4FIPP+kfnH' \
'/zsrE/87j6v/OycL/pYB1/4BHO/+jfHD/ztbV/7+yrP+ARzv/AAAAAIBHO//O4+r/zu/9/87v/f/O7/3/zu/9/87v/f/O7/3/zu/9/87v' \
'/f/O7/3/zu/9/87v/f/O1dT/gEc7/wAAAACARztMpYB1/87v/f8IC5X/CAuV/wgLlf8IC5X/zu/9/77h+v9vgcv/SFSy/wAAif97j87' \
'/oXdp/4BHO0wAAAAAAAAAAIBHO//O7/3/gabq/w4Tnv8OE57/gabq/87v/f96muj/DBCd/wAAif83SMf/zu/9/4BHO' \
'/8AAAAAAAAAAIBHO0ynhXv/zu/9/87v/f8OE57/CAuV/87v/f+63vn/Hyqx/wAAif9KXMX/zO38/87v/f+mhHn/gEc7TAAAAAChd2n' \
'/1eHk/87v/f/O7/3/DhOe/wgLlf9nhuT/MEPF/wAAif82ScT/utjy/87v/f/O7/3/zsrD/6F3af8AAAAAgEc7/9Pk6v/O7/3/zu/9' \
'/xQcqP8IC5X/FBqo/xUYlf9of9v/zu/9/87v/f/O7/3/zu/9/87d4f+ARzv/AAAAAIBHO//Y19X/zu/9/87v/f8RGaT/CAuV' \
'/wAAif90h8v/zu/9/87v/f/O7/3/zu/9/87v/f/OycL/gEc7/wAAAAChd2n/up6S/87v/f/O7/3/ERmk/wgLlf9DXdj/CQ6Z/zdAqf/O7' \
'/3/zu/9/87v/f/O7/3/upyQ/6F3af8AAAAAgEc7TIJLQP/P7/3/zu/9/xQcqP8IC5X/zu/9/46l2f8jNMD/gJXS/87v/f/O7/3/zu/9' \
'/45kXf+ARztMAAAAAAAAAACARzv/0e35/5Go2/8UHKj/CAuV/5Go2//O7/3/XHDY/w4Tn/8YHJf/QEms/9Dr9v+ARzv' \
'/AAAAAAAAAACARztMu6KY/9Hu+v8IC5X/CAuV/wgLlf8IC5X/zu/9/87v/f9OZtz/FB2q/y08wv/Q6/b/oXdp/4BHO0wAAAAAgEc7/9' \
'/s8P/R7fn/0e77/9Hu+//O7/3/zu/9/87v/f/O7/3/z+/9/9Dt+P/Q7Pf/3u3t/87n8P+ARzv/AAAAAIBHO//Sz8j/3+zw/7qhlf+IWE' \
'//o31w/9jZ2P/a7fH/2NfV/7ylm/+GVEr/qYyD/87o8f/R2dj/gEc7/wAAAACARztMgEc7/4BHO/+ARztMAAAAAIBHO0yARzv/gEc7' \
'/4BHO/+ARztMAAAAAIBHO0yARzv/gEc7' \
'/4BHO0wAAAAACCEAAAABAAAAAQAAAAEAAIADAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAACAAwAAAAEAAAABAAAAAQAACCEAAA== '
    def __init__(self):
        # setup logging into qBittorrent/logs
        logging.basicConfig(handlers=[logging.FileHandler(self.path_to('../../logs', 'kinozal.log'), 'w', 'utf-8')],

BIN  kinozal.ico (binary file not shown; before: 1.1 KiB)

BIN  kinozal.png (binary file not shown; before: 752 B)

395  kinozal.py

@@ -1,36 +1,29 @@
-# VERSION: 2.1
+# VERSION: 2.2
 # AUTHORS: imDMG [imdmgg@gmail.com]
 # Kinozal.tv search engine plugin for qBittorrent

+import base64
 import json
 import logging
 import os
 import re
+import socket
 import tempfile
-import threading
 import time
-from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
+from concurrent.futures.thread import ThreadPoolExecutor
-from urllib.parse import urlencode
+from functools import partial
-from urllib.error import URLError, HTTPError
+from html import unescape
 from http.cookiejar import MozillaCookieJar
-from novaprinter import prettyPrinter
+from urllib.error import URLError, HTTPError
+from urllib.parse import urlencode, unquote
+from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler

-class kinozal(object):
+from novaprinter import prettyPrinter
-    name = 'Kinozal'
-    url = 'http://kinozal.tv'
-    supported_categories = {'all': '0',
-                            'movies': '1002',
-                            'tv': '1001',
-                            'music': '1004',
-                            'games': '23',
-                            'anime': '20',
-                            'software': '32'}

-    # default config for kinozal.json
+# default config
 config = {
     "version": 2,
     "torrentDate": True,
     "username": "USERNAME",
@@ -41,202 +34,272 @@ class kinozal(object):
"https": "" "https": ""
}, },
"magnet": True, "magnet": True,
"ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0" "ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0 "
} }
def __init__(self):
# setup logging into qBittorrent/logs
logging.basicConfig(handlers=[logging.FileHandler(self.path_to('../../logs', 'kinozal.log'), 'w', 'utf-8')],
level=logging.DEBUG,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m-%d %H:%M')
try: def path_to(*file):
return os.path.abspath(os.path.join(os.path.dirname(__file__), *file))
def rng(t):
return range(1, -(-t // 50))
PATTERNS = (r'</span>Найдено\s+?(\d+)\s+?раздач',
r'nam"><a\s+?href="/(.+?)"\s+?class="r\d">(.*?)</a>.+?s\'>.+?s\'>'
r'(.*?)<.+?sl_s\'>(\d+)<.+?sl_p\'>(\d+)<.+?s\'>(.*?)</td>',
'%sbrowse.php?s=%s&c=%s', "%s&page=%s")
FILENAME = __file__[__file__.rfind('/') + 1:-3]
FILE_J, FILE_C = [path_to(FILENAME + fe) for fe in ['.json', '.cookie']]
# base64 encoded image
ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAQAQAAAAAAAAAAA"
"AAAAAAAAAAAACARztMgEc7/4BHO/+ARztMAAAAAIBHO0yhd2n/gEc7/6F3af+ARztMAAAA"
"AIBHO0yARzv/gEc7/4BHO0wAAAAAgEc7/7iYiv/O4+r/pH5x/4FIPP+kfnH/zsrE/87j6v"
"/OycL/pYB1/4BHO/+jfHD/ztbV/7+yrP+ARzv/AAAAAIBHO//O4+r/zu/9/87v/f/O7/3/"
"zu/9/87v/f/O7/3/zu/9/87v/f/O7/3/zu/9/87v/f/O1dT/gEc7/wAAAACARztMpYB1/8"
"7v/f8IC5X/CAuV/wgLlf8IC5X/zu/9/77h+v9vgcv/SFSy/wAAif97j87/oXdp/4BHO0wA"
"AAAAAAAAAIBHO//O7/3/gabq/w4Tnv8OE57/gabq/87v/f96muj/DBCd/wAAif83SMf/zu"
"/9/4BHO/8AAAAAAAAAAIBHO0ynhXv/zu/9/87v/f8OE57/CAuV/87v/f+63vn/Hyqx/wAA"
"if9KXMX/zO38/87v/f+mhHn/gEc7TAAAAAChd2n/1eHk/87v/f/O7/3/DhOe/wgLlf9nhu"
"T/MEPF/wAAif82ScT/utjy/87v/f/O7/3/zsrD/6F3af8AAAAAgEc7/9Pk6v/O7/3/zu/9"
"/xQcqP8IC5X/FBqo/xUYlf9of9v/zu/9/87v/f/O7/3/zu/9/87d4f+ARzv/AAAAAIBHO/"
"/Y19X/zu/9/87v/f8RGaT/CAuV/wAAif90h8v/zu/9/87v/f/O7/3/zu/9/87v/f/OycL/"
"gEc7/wAAAAChd2n/up6S/87v/f/O7/3/ERmk/wgLlf9DXdj/CQ6Z/zdAqf/O7/3/zu/9/8"
"7v/f/O7/3/upyQ/6F3af8AAAAAgEc7TIJLQP/P7/3/zu/9/xQcqP8IC5X/zu/9/46l2f8j"
"NMD/gJXS/87v/f/O7/3/zu/9/45kXf+ARztMAAAAAAAAAACARzv/0e35/5Go2/8UHKj/CA"
"uV/5Go2//O7/3/XHDY/w4Tn/8YHJf/QEms/9Dr9v+ARzv/AAAAAAAAAACARztMu6KY/9Hu"
"+v8IC5X/CAuV/wgLlf8IC5X/zu/9/87v/f9OZtz/FB2q/y08wv/Q6/b/oXdp/4BHO0wAAA"
"AAgEc7/9/s8P/R7fn/0e77/9Hu+//O7/3/zu/9/87v/f/O7/3/z+/9/9Dt+P/Q7Pf/3u3t"
"/87n8P+ARzv/AAAAAIBHO//Sz8j/3+zw/7qhlf+IWE//o31w/9jZ2P/a7fH/2NfV/7ylm/"
"+GVEr/qYyD/87o8f/R2dj/gEc7/wAAAACARztMgEc7/4BHO/+ARztMAAAAAIBHO0yARzv/"
"gEc7/4BHO/+ARztMAAAAAIBHO0yARzv/gEc7/4BHO0wAAAAACCEAAAABAAAAAQAAAAEAAI"
"ADAAAAAQAAAAEAAAABAAAAAQAAAAEAAAABAACAAwAAAAEAAAABAAAAAQAACCEAAA== ")
# setup logging
logging.basicConfig(
format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
datefmt="%m-%d %H:%M")
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
try:
# try to load user data from file # try to load user data from file
with open(self.path_to('kinozal.json'), 'r+') as f: with open(FILE_J, 'r+') as f:
config = json.load(f) cfg = json.load(f)
if "version" not in config.keys(): if "version" not in cfg.keys():
config.update({"version": 2, "torrentDate": True}) cfg.update({"version": 2, "torrentDate": True})
f.seek(0) f.seek(0)
f.write(json.dumps(config, indent=4, sort_keys=False)) f.write(json.dumps(cfg, indent=4, sort_keys=False))
f.truncate() f.truncate()
self.config = config config = cfg
except OSError as e: logger.debug("Config is loaded.")
logging.error(e) except OSError as e:
logger.error(e)
# if file doesn't exist, we'll create it # if file doesn't exist, we'll create it
with open(self.path_to('kinozal.json'), 'w') as f: with open(FILE_J, 'w') as f:
f.write(json.dumps(self.config, indent=4, sort_keys=False)) f.write(json.dumps(config, indent=4, sort_keys=False))
# also write/rewrite ico file
with open(path_to(FILENAME + '.ico'), 'wb') as f:
f.write(base64.b64decode(ICON))
logger.debug("Write files.")
class kinozal(object):
name = 'Kinozal'
url = 'http://kinozal.tv/'
supported_categories = {'all': '0',
'movies': '1002',
'tv': '1001',
'music': '1004',
'games': '23',
'anime': '20',
'software': '32'}
def __init__(self):
# error message
self.error = None
# establish connection # establish connection
self.session = build_opener() self.session = build_opener()
# add proxy handler if needed # add proxy handler if needed
if self.config['proxy'] and any(self.config['proxies'].keys()): if config['proxy']:
self.session.add_handler(ProxyHandler(self.config['proxies'])) if any(config['proxies'].values()):
self.session.add_handler(ProxyHandler(config['proxies']))
logger.debug("Proxy is set!")
else:
self.error = "Proxy enabled, but not set!"
# change user-agent # change user-agent
self.session.addheaders.pop() self.session.addheaders.pop()
self.session.addheaders.append(('User-Agent', self.config['ua'])) self.session.addheaders.append(('User-Agent', config['ua']))
# avoid endless waiting
self.blocked = False
mcj = MozillaCookieJar()
cookie_file = os.path.abspath(os.path.join(os.path.dirname(__file__), 'kinozal.cookie'))
# load local cookies # load local cookies
if os.path.isfile(cookie_file): mcj = MozillaCookieJar()
mcj.load(cookie_file, ignore_discard=True) try:
mcj.load(FILE_C, ignore_discard=True)
if 'uid' in [cookie.name for cookie in mcj]: if 'uid' in [cookie.name for cookie in mcj]:
# if cookie.expires < int(time.time()) # if cookie.expires < int(time.time())
logging.info("Local cookies is loaded") logger.info("Local cookies is loaded")
self.session.add_handler(HTTPCookieProcessor(mcj)) self.session.add_handler(HTTPCookieProcessor(mcj))
else: else:
logging.info("Local cookies expired or bad") logger.info("Local cookies expired or bad")
logging.debug(f"That we have: {[cookie for cookie in mcj]}") logger.debug(f"That we have: {[cookie for cookie in mcj]}")
mcj.clear() mcj.clear()
self.login(mcj, cookie_file) self.login(mcj)
else: except FileNotFoundError:
self.login(mcj, cookie_file) self.login(mcj)
def login(self, mcj, cookie_file):
self.session.add_handler(HTTPCookieProcessor(mcj))
form_data = {"username": self.config['username'], "password": self.config['password']}
# so we first encode keys to cp1251 then do default decode whole string
data_encoded = urlencode({k: v.encode('cp1251') for k, v in form_data.items()}).encode()
self._catch_error_request(self.url + '/takelogin.php', data_encoded)
if 'uid' not in [cookie.name for cookie in mcj]:
logging.warning("we not authorized, please check your credentials")
else:
mcj.save(cookie_file, ignore_discard=True, ignore_expires=True)
logging.info('We successfully authorized')
def draw(self, html: str):
torrents = re.findall(r'nam"><a\s+?href="(.+?)"\s+?class="r\d">(.*?)</a>'
r'.+?s\'>.+?s\'>(.*?)<.+?sl_s\'>(\d+)<.+?sl_p\'>(\d+)<.+?s\'>(.*?)</td>', html, re.S)
today, yesterday = time.strftime("%y.%m.%d"), time.strftime("%y.%m.%d", time.localtime(time.time()-86400))
for tor in torrents:
torrent_date = ""
if self.config['torrentDate']:
ct = tor[5].split()[0]
if "сегодня" in ct:
torrent_date = today
elif "вчера" in ct:
# yeah this is yesterday
torrent_date = yesterday
else:
torrent_date = time.strftime("%y.%m.%d", time.strptime(ct, "%d.%m.%Y"))
torrent_date = f'[{torrent_date}] '
torrent = {"engine_url": self.url,
"desc_link": self.url + tor[0],
"name": torrent_date + tor[1],
"link": "http://dl.kinozal.tv/download.php?id=" + tor[0].split("=")[1],
"size": self.units_convert(tor[2]),
"seeds": tor[3],
"leech": tor[4]}
prettyPrinter(torrent)
del torrents
# return len(torrents)
def path_to(self, *file): def search(self, what, cat='all'):
return os.path.abspath(os.path.join(os.path.dirname(__file__), *file)) if self.error:
self.pretty_error(what)
return
query = PATTERNS[2] % (self.url, what.replace(" ", "+"),
self.supported_categories[cat])
@staticmethod # make first request (maybe it enough)
def units_convert(unit): t0, total = time.time(), self.searching(query, True)
# replace size units if self.error:
find = unit.split()[1] self.pretty_error(what)
replace = {'ТБ': 'TB', 'ГБ': 'GB', 'МБ': 'MB', 'КБ': 'KB'}[find] return
# do async requests
if total > 50:
qrs = [PATTERNS[3] % (query, x) for x in rng(total)]
with ThreadPoolExecutor(len(qrs)) as executor:
executor.map(self.searching, qrs, timeout=30)
return unit.replace(find, replace) logger.debug(f"--- {time.time() - t0} seconds ---")
logger.info(f"Found torrents: {total}")
def download_torrent(self, url: str): def download_torrent(self, url: str):
if self.blocked:
return
# choose download method # choose download method
if self.config.get("magnet"): if config.get("magnet"):
res = self._catch_error_request(self.url + "/get_srv_details.php?action=2&id=" + url.split("=")[1]) url = f"{self.url}get_srv_details.php?" \
# magnet = re.search(":\s([A-Z0-9]{40})<", res.read().decode())[1] f"action=2&id={url.split('=')[1]}"
magnet = 'magnet:?xt=urn:btih:' + res.read().decode()[18:58]
# return magnet link res = self._catch_error_request(url)
logging.debug(magnet + " " + url) if self.error:
print(magnet + " " + url) self.pretty_error(url)
return
if config.get("magnet"):
path = 'magnet:?xt=urn:btih:' + res.read().decode()[18:58]
else: else:
# Create a torrent file # Create a torrent file
file, path = tempfile.mkstemp('.torrent') file, path = tempfile.mkstemp('.torrent')
file = os.fdopen(file, "wb") with os.fdopen(file, "wb") as fd:
# Download url
response = self._catch_error_request(url)
# Write it to a file # Write it to a file
file.write(response.read()) fd.write(res.read())
file.close()
# return file path # return magnet link / file path
logging.debug(path + " " + url) logger.debug(path + " " + url)
print(path + " " + url) print(path + " " + url)
def login(self, mcj):
if self.error:
return
self.session.add_handler(HTTPCookieProcessor(mcj))
form_data = {"username": config['username'],
"password": config['password']}
logger.debug(f"Login. Data before: {form_data}")
# so we first encode vals to cp1251 then do default decode whole string
data_encoded = urlencode(
{k: v.encode('cp1251') for k, v in form_data.items()}).encode()
logger.debug(f"Login. Data after: {data_encoded}")
self._catch_error_request(self.url + 'takelogin.php', data_encoded)
if self.error:
return
logger.debug(f"That we have: {[cookie for cookie in mcj]}")
if 'uid' in [cookie.name for cookie in mcj]:
mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
logger.info('We successfully authorized')
else:
self.error = "We not authorized, please check your credentials!"
logger.warning(self.error)
def searching(self, query, first=False): def searching(self, query, first=False):
response = self._catch_error_request(query) response = self._catch_error_request(query)
if not response:
return None
page = response.read().decode('cp1251') page = response.read().decode('cp1251')
self.draw(page) self.draw(page)
total = int(re.search(r'</span>Найдено\s+?(\d+)\s+?раздач', page)[1]) if first else -1
return total
def search(self, what, cat='all'): return int(re.search(PATTERNS[0], page)[1]) if first else -1
if self.blocked:
return
query = f'{self.url}/browse.php?s={what.replace(" ", "+")}&c={self.supported_categories[cat]}'
# make first request (maybe it enough) def draw(self, html: str):
total = self.searching(query, True) torrents = re.findall(PATTERNS[1], html, re.S)
# do async requests _part = partial(time.strftime, "%y.%m.%d")
if total > 50: # yeah this is yesterday
tasks = [] yesterday = _part(time.localtime(time.time() - 86400))
for x in range(1, -(-total//50)): for tor in torrents:
task = threading.Thread(target=self.searching, args=(query + f"&page={x}",)) torrent_date = ""
tasks.append(task) if config['torrentDate']:
task.start() ct = tor[5].split()[0]
if "сегодня" in ct:
torrent_date = _part()
elif "вчера" in ct:
torrent_date = yesterday
else:
torrent_date = _part(time.strptime(ct, "%d.%m.%Y"))
torrent_date = f'[{torrent_date}] '
# wait slower request in stack # replace size units
for task in tasks: table = {'Т': 'T', 'Г': 'G', 'М': 'M', 'К': 'K', 'Б': 'B'}
task.join()
del tasks
logging.debug(f"--- {time.time() - start_time} seconds ---") prettyPrinter({
logging.info(f"Found torrents: {total}") "engine_url": self.url,
"desc_link": self.url + tor[0],
"name": torrent_date + unescape(tor[1]),
"link": "http://dl.kinozal.tv/download.php?id=" +
tor[0].split("=")[1],
"size": tor[2].translate(tor[2].maketrans(table)),
"seeds": tor[3],
"leech": tor[4]
})
del torrents
def _catch_error_request(self, url='', data=None): def _catch_error_request(self, url='', data=None, retrieve=False):
url = url or self.url url = url or self.url
try: try:
response = self.session.open(url, data) response = self.session.open(url, data, 5)
# Only continue if response status is OK.
if response.getcode() != 200:
logging.error('Unable connect')
raise HTTPError(response.geturl(), response.getcode(),
f"HTTP request to {url} failed with status: {response.getcode()}",
response.info(), None)
except (URLError, HTTPError) as e:
logging.error(e)
raise e
# checking that tracker is'nt blocked # checking that tracker is'nt blocked
self.blocked = False
if self.url not in response.geturl(): if self.url not in response.geturl():
logging.warning(f"{self.url} is blocked. Try proxy or another proxy") raise URLError(f"{self.url} is blocked. Try another proxy.")
self.blocked = True except (socket.error, socket.timeout) as err:
if not retrieve:
return self._catch_error_request(url, data, True)
logger.error(err)
self.error = f"{self.url} is not response! Maybe it is blocked."
if "no host given" in err.args:
self.error = "Proxy is bad, try another!"
except (URLError, HTTPError) as err:
logger.error(err.reason)
self.error = err.reason
if hasattr(err, 'code'):
self.error = f"Request to {url} failed with status: {err.code}"
else:
return response return response
return None
def pretty_error(self, what):
prettyPrinter({"engine_url": self.url,
"desc_link": "https://github.com/imDMG/qBt_SE",
"name": f"[{unquote(what)}][Error]: {self.error}",
"link": self.url + "error",
"size": "1 TB", # lol
"seeds": 100,
"leech": 100})
self.error = None
if __name__ == "__main__": if __name__ == "__main__":
# benchmark start engine = kinozal()
start_time = time.time() engine.search('doctor')
kinozal_se = kinozal()
kinozal_se.search('doctor')
print("--- %s seconds ---" % (time.time() - start_time))
# benchmark end
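
In both refactored plugins the manual threading.Thread bookkeeping for extra result pages is replaced with concurrent.futures.ThreadPoolExecutor, as in the new search() above. A minimal standalone sketch of that fan-out; fetch_page and the query strings are placeholders:

from concurrent.futures import ThreadPoolExecutor


def fetch_page(query):
    # placeholder for self.searching(query), which requests and parses one page
    return f"parsed {query}"


queries = [f"browse.php?s=doctor&page={n}" for n in range(1, 4)]
with ThreadPoolExecutor(len(queries)) as executor:
    for result in executor.map(fetch_page, queries, timeout=30):
        print(result)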

BIN  nnmclub.ico (binary file not shown; before: 1.1 KiB)

BIN  nnmclub.png (binary file not shown; before: 912 B)

362  nnmclub.py

@@ -1,36 +1,27 @@
-# VERSION: 2.1
+# VERSION: 2.2
 # AUTHORS: imDMG [imdmgg@gmail.com]
 # NoNaMe-Club search engine plugin for qBittorrent

+import base64
 import json
 import logging
 import os
 import re
+import socket
 import tempfile
-import threading
 import time
-from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
+from concurrent.futures import ThreadPoolExecutor
-from urllib.parse import urlencode # , parse_qs
-from urllib.error import URLError, HTTPError
 from http.cookiejar import Cookie, MozillaCookieJar
-from novaprinter import prettyPrinter
+from urllib.error import URLError, HTTPError
+from urllib.parse import urlencode, unquote
+from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler

-class nnmclub(object):
+from novaprinter import prettyPrinter
-    name = 'NoNaMe-Club'
-    url = 'https://nnmclub.to/forum/'
-    supported_categories = {'all': '-1',
-                            'movies': '14',
-                            'tv': '27',
-                            'music': '16',
-                            'games': '17',
-                            'anime': '24',
-                            'software': '21'}

-    # default config for nnmclub.json
+# default config
 config = {
     "version": 2,
     "torrentDate": True,
     "username": "USERNAME",
@@ -40,189 +31,266 @@ class nnmclub(object):
"http": "", "http": "",
"https": "" "https": ""
}, },
"ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0" "magnet": True,
} "ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0 "
}
def __init__(self):
# setup logging into qBittorrent/logs
logging.basicConfig(handlers=[logging.FileHandler(self.path_to('../../logs', 'nnmclub.log'), 'w', 'utf-8')],
level=logging.DEBUG,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m-%d %H:%M')
try: def path_to(*file):
return os.path.abspath(os.path.join(os.path.dirname(__file__), *file))
def rng(t):
return range(50, -(-t // 50) * 50, 50)
PATTERNS = (r'(\d{1,3})\s\(max:',
r'd\stopic.+?href="(.+?)".+?<b>(.+?)</b>.+?href="(d.+?)".+?/u>\s'
r'(.+?)<.+?b>(\d+)</.+?b>(\d+)<.+?<u>(\d+)</u>',
'%stracker.php?nm=%s&%s', "%s&start=%s", r'code"\svalue="(.+?)"')
FILENAME = __file__[__file__.rfind('/') + 1:-3]
FILE_J, FILE_C = [path_to(FILENAME + fe) for fe in ['.json', '.cookie']]
# base64 encoded image
ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAAAAAAAAAAAAA"
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaQicAXRQFADICAQAHAAAA"
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADz4QA8PizAP"
"u3XQDpjEIBtgkCABoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
"BAIAEuyUAP3/8AD//akA//+hAP92SgCVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFA"
"AAAAAAAAAAAAAAAAEAADjLiQD8//wA//7RFP//+lX/WlsPlwAAAAMAAAAGAAAAAAAAAAAA"
"AAAAEAgAQqNBAP99HADfIAYAfgAAABQAAAAX21UC///4AP///Sj/+/Z//lZcMJOOjQCrqI"
"EAwQ4CADAAAAAAAAAAAGEXAM39oAD//7oA/9ucAP94GwDFVRkK6p0wAP//owD/+KoB/+FT"
"C///uQD//+wA//67AP6QUQC9DggAGAAAAACPNQDl964A//qqAv//3AD//8sB/39WAP85Aw"
"X/nxkA/5MQAP/sJQD/0T8A//Z9AP/6kwD/86AA/qJGALwTAABEtzcA5cshAP/jOAD//7wg"
"///+Dv/RUQH/AgEE8hcAAG40BgB3RAAAzlYCAPh0BAD/zh8A//+RAP//hQD/5B8A/xcAAE"
"x+HgDXz5oc/8yfPv//2g7/6VMA/AkEABQAAAAAAAAAAQAAAA4cCgBBOwkAg3EfAKyPfQDE"
"dkAAq0ELAGYAAAAABQMBQNldFf3/8w3///sA/7AoAPIAAAAAAAAAAAAAAAAAAAAAAAAAAA"
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAchNAPLaLgD/+8AA//eOAP9qDAGpAAAAAAAAAAAA"
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFwLgCX0h8A//WiAP/+TQD/Kg"
"QAZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALQwAZqgR"
"APr0hwD/2VIA/QAAAAYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
"AAAAAAAAoBACp6BAD/7H0A/3ZlALoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
"AAAAAAAAAAAAAAAAAAAAAAARAQAx4zcA/93AAPQAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAA"
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACgEASawXAPMTCgAnAAAAAAAAAAAAAAAA"
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/D+sQfgfrEH4H6xBuAesQQ"
"ADrEEAAaxBAACsQQAArEEBAKxBg/+sQQP/rEED/6xBg/+sQYf/rEGH/6xBj/+sQQ==")
# setup logging
logging.basicConfig(
format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
datefmt="%m-%d %H:%M")
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
try:
# try to load user data from file # try to load user data from file
with open(self.path_to('nnmclub.json'), 'r+') as f: with open(FILE_J, 'r+') as f:
config = json.load(f) cfg = json.load(f)
if "version" not in config.keys(): if "version" not in cfg.keys():
config.update({"version": 2, "torrentDate": True}) cfg.update({"version": 2, "torrentDate": True})
f.seek(0) f.seek(0)
f.write(json.dumps(config, indent=4, sort_keys=False)) f.write(json.dumps(cfg, indent=4, sort_keys=False))
f.truncate() f.truncate()
self.config = config config = cfg
except OSError as e: logger.debug("Config is loaded.")
logging.error(e) except OSError as e:
logger.error(e)
# if file doesn't exist, we'll create it # if file doesn't exist, we'll create it
with open(self.path_to('nnmclub.json'), 'w') as f: with open(FILE_J, 'w') as f:
f.write(json.dumps(self.config, indent=4, sort_keys=False)) f.write(json.dumps(config, indent=4, sort_keys=False))
# also write/rewrite ico file
with open(path_to(FILENAME + '.ico'), 'wb') as f:
f.write(base64.b64decode(ICON))
logger.debug("Write files.")
class nnmclub(object):
name = 'NoNaMe-Club'
url = 'https://nnmclub.to/forum/'
supported_categories = {'all': '-1',
'movies': '14',
'tv': '27',
'music': '16',
'games': '17',
'anime': '24',
'software': '21'}
def __init__(self):
# error message
self.error = None
# establish connection # establish connection
self.session = build_opener() self.session = build_opener()
# add proxy handler if needed # add proxy handler if needed
if self.config['proxy'] and any(self.config['proxies'].keys()): if config['proxy']:
self.session.add_handler(ProxyHandler(self.config['proxies'])) if any(config['proxies'].values()):
self.session.add_handler(ProxyHandler(config['proxies']))
logger.debug("Proxy is set!")
else:
self.error = "Proxy enabled, but not set!"
# change user-agent # change user-agent
self.session.addheaders.pop() self.session.addheaders.pop()
self.session.addheaders.append(('User-Agent', self.config['ua'])) self.session.addheaders.append(('User-Agent', config['ua']))
# avoid endless waiting
self.blocked = False
mcj = MozillaCookieJar()
cookie_file = self.path_to('nnmclub.cookie')
# load local cookies # load local cookies
if os.path.isfile(cookie_file): mcj = MozillaCookieJar()
mcj.load(cookie_file, ignore_discard=True) try:
mcj.load(FILE_C, ignore_discard=True)
if 'phpbb2mysql_4_sid' in [cookie.name for cookie in mcj]: if 'phpbb2mysql_4_sid' in [cookie.name for cookie in mcj]:
# if cookie.expires < int(time.time()) # if cookie.expires < int(time.time())
logging.info("Local cookies is loaded") logger.info("Local cookies is loaded")
self.session.add_handler(HTTPCookieProcessor(mcj)) self.session.add_handler(HTTPCookieProcessor(mcj))
else: else:
logging.info("Local cookies expired or bad") logger.info("Local cookies expired or bad")
logging.debug(f"That we have: {[cookie for cookie in mcj]}") logger.debug(f"That we have: {[cookie for cookie in mcj]}")
mcj.clear() mcj.clear()
self.login(mcj, cookie_file) self.login(mcj)
else: except FileNotFoundError:
self.login(mcj, cookie_file) self.login(mcj)
def search(self, what, cat='all'):
if self.error:
self.pretty_error(what)
return
c = self.supported_categories[cat]
query = PATTERNS[2] % (self.url, what.replace(" ", "+"),
"f=-1" if c == "-1" else "c=" + c)
# make first request (maybe it enough)
t0, total = time.time(), self.searching(query, True)
if self.error:
self.pretty_error(what)
return
# do async requests
if total > 50:
qrs = [PATTERNS[3] % (query, x) for x in rng(total)]
with ThreadPoolExecutor(len(qrs)) as executor:
executor.map(self.searching, qrs, timeout=30)
logger.debug(f"--- {time.time() - t0} seconds ---")
logger.info(f"Found torrents: {total}")
def login(self, mcj, cookie_file): def download_torrent(self, url: str):
# Download url
response = self._catch_error_request(url)
if self.error:
self.pretty_error(url)
return
# Create a torrent file
file, path = tempfile.mkstemp('.torrent')
with os.fdopen(file, "wb") as fd:
# Write it to a file
fd.write(response.read())
# return file path
logger.debug(path + " " + url)
print(path + " " + url)
def login(self, mcj):
if self.error:
return
# if we wanna use https we mast add ssl=enable_ssl to cookie # if we wanna use https we mast add ssl=enable_ssl to cookie
mcj.set_cookie(Cookie(0, 'ssl', "enable_ssl", None, False, '.nnmclub.to', True, mcj.set_cookie(Cookie(0, 'ssl', "enable_ssl", None, False,
False, '/', True, False, None, 'ParserCookie', None, None, None)) '.nnmclub.to', True, False, '/', True,
False, None, 'ParserCookie', None, None, None))
self.session.add_handler(HTTPCookieProcessor(mcj)) self.session.add_handler(HTTPCookieProcessor(mcj))
response = self._catch_error_request(self.url + 'login.php') response = self._catch_error_request(self.url + 'login.php')
if not self.blocked: if not response:
code = re.search(r'code"\svalue="(.+?)"', response.read().decode('cp1251'))[1] return None
form_data = {"username": self.config['username'], code = re.search(PATTERNS[4], response.read().decode('cp1251'))[1]
"password": self.config['password'], form_data = {"username": config['username'],
"password": config['password'],
"autologin": "on", "autologin": "on",
"code": code, "code": code,
"login": "Вход"} "login": "Вход"}
# so we first encode keys to cp1251 then do default decode whole string # so we first encode vals to cp1251 then do default decode whole string
data_encoded = urlencode({k: v.encode('cp1251') for k, v in form_data.items()}).encode() data_encoded = urlencode(
{k: v.encode('cp1251') for k, v in form_data.items()}).encode()
self._catch_error_request(self.url + 'login.php', data_encoded) self._catch_error_request(self.url + 'login.php', data_encoded)
if 'phpbb2mysql_4_sid' not in [cookie.name for cookie in mcj]: if self.error:
logging.warning("we not authorized, please check your credentials") return
logger.debug(f"That we have: {[cookie for cookie in mcj]}")
if 'phpbb2mysql_4_sid' in [cookie.name for cookie in mcj]:
mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
logger.info('We successfully authorized')
else: else:
mcj.save(cookie_file, ignore_discard=True, ignore_expires=True) self.error = "We not authorized, please check your credentials!"
logging.info('We successfully authorized') logger.warning(self.error)
def draw(self, html: str): def draw(self, html: str):
torrents = re.findall(r'd\stopic.+?href="(.+?)".+?<b>(.+?)</b>.+?href="(d.+?)"' torrents = re.findall(PATTERNS[1], html, re.S)
r'.+?/u>\s(.+?)<.+?b>(\d+)</.+?b>(\d+)<.+?<u>(\d+)</u>', html, re.S)
for tor in torrents: for tor in torrents:
torrent_date = "" torrent_date = ""
if self.config['torrentDate']: if config['torrentDate']:
torrent_date = f'[{time.strftime("%y.%m.%d", time.localtime(int(tor[6])))}] ' _loc = time.localtime(int(tor[6]))
torrent = {"engine_url": self.url, torrent_date = f'[{time.strftime("%y.%m.%d", _loc)}] '
prettyPrinter({
"engine_url": self.url,
"desc_link": self.url + tor[0], "desc_link": self.url + tor[0],
"name": torrent_date + tor[1], "name": torrent_date + tor[1],
"link": self.url + tor[2], "link": self.url + tor[2],
"size": tor[3].replace(',', '.'), "size": tor[3].replace(',', '.'),
"seeds": tor[4], "seeds": tor[4],
"leech": tor[5]} "leech": tor[5]
})
prettyPrinter(torrent)
del torrents del torrents
# return len(torrents)
def path_to(self, *file):
return os.path.abspath(os.path.join(os.path.dirname(__file__), *file))
def download_torrent(self, url):
if self.blocked:
return
# Create a torrent file
file, path = tempfile.mkstemp('.torrent')
file = os.fdopen(file, "wb")
# Download url
response = self._catch_error_request(url)
# Write it to a file
file.write(response.read())
file.close()
# return file path
logging.debug(path + " " + url)
print(path + " " + url)
def searching(self, query, first=False): def searching(self, query, first=False):
response = self._catch_error_request(query) response = self._catch_error_request(query)
if not response:
return None
page = response.read().decode('cp1251') page = response.read().decode('cp1251')
self.draw(page) self.draw(page)
total = int(re.search(r'(\d{1,3})\s\(max:', page)[1]) if first else -1
return total
def search(self, what, cat='all'): return int(re.search(PATTERNS[0], page)[1]) if first else -1
if self.blocked:
return
c = self.supported_categories[cat]
query = f'{self.url}tracker.php?nm={what.replace(" ", "+")}&{"f=-1" if c == "-1" else "c=" + c}'
# make first request (maybe it enough) def _catch_error_request(self, url='', data=None, retrieve=False):
total = self.searching(query, True)
# do async requests
if total > 50:
tasks = []
for x in range(1, -(-total//50)):
task = threading.Thread(target=self.searching, args=(query + f"&start={x * 50}",))
tasks.append(task)
task.start()
# wait slower request in stack
for task in tasks:
task.join()
del tasks
logging.debug(f"--- {time.time() - start_time} seconds ---")
logging.info(f"Found torrents: {total}")
def _catch_error_request(self, url='', data=None):
url = url or self.url url = url or self.url
try: try:
response = self.session.open(url, data) response = self.session.open(url, data, 5)
# Only continue if response status is OK.
if response.getcode() != 200:
logging.error('Unable connect')
raise HTTPError(response.geturl(), response.getcode(),
f"HTTP request to {url} failed with status: {response.getcode()}",
response.info(), None)
except (URLError, HTTPError) as e:
logging.error(e)
raise e
# checking that tracker is'nt blocked # checking that tracker is'nt blocked
self.blocked = False if not any([x in response.geturl()
if self.url not in response.geturl(): # redirect to nnm-club.ws on download
print(response.geturl()) for x in [self.url, 'nnm-club.ws']]):
logging.warning(f"{self.url} is blocked. Try proxy or another proxy") raise URLError(f"{self.url} is blocked. Try another proxy.")
self.blocked = True except (socket.error, socket.timeout) as err:
if not retrieve:
return self._catch_error_request(url, data, True)
logger.error(err)
self.error = f"{self.url} is not response! Maybe it is blocked."
if "no host given" in err.args:
self.error = "Proxy is bad, try another!"
except (URLError, HTTPError) as err:
logger.error(err.reason)
self.error = err.reason
if hasattr(err, 'code'):
self.error = f"Request to {url} failed with status: {err.code}"
else:
return response return response
return None
def pretty_error(self, what):
prettyPrinter({"engine_url": self.url,
"desc_link": "https://github.com/imDMG/qBt_SE",
"name": f"[{unquote(what)}][Error]: {self.error}",
"link": self.url + "error",
"size": "1 TB", # lol
"seeds": 100,
"leech": 100})
self.error = None
if __name__ == "__main__": if __name__ == "__main__":
# benchmark start engine = nnmclub()
start_time = time.time() engine.search('doctor')
# nnmclub_se = nnmclub()
# nnmclub_se.search('bird')
print(f"--- {time.time() - start_time} seconds ---")
# benchmark end
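
The login helpers in all three plugins build their POST body the same way: each form value is encoded to cp1251 before urlencode(), because the trackers expect that code page. A standalone illustration with made-up form values:

from urllib.parse import urlencode

form_data = {"username": "пример", "password": "PASSWORD"}
data_encoded = urlencode(
    {k: v.encode('cp1251') for k, v in form_data.items()}).encode()
print(data_encoded)  # bytes ready to be passed as POST data to session.open()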

280  rutracker.py

@@ -0,0 +1,280 @@
# VERSION: 1.0
# AUTHORS: imDMG [imdmgg@gmail.com]
# rutracker.org search engine plugin for qBittorrent
import base64
import json
import logging
import os
import re
import socket
import tempfile
import time
from concurrent.futures import ThreadPoolExecutor
from html import unescape
from http.cookiejar import Cookie, MozillaCookieJar
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode, unquote
from urllib.request import build_opener, HTTPCookieProcessor, ProxyHandler
from novaprinter import prettyPrinter
# default config
config = {
    "version": 2,
    "torrentDate": True,
    "username": "USERNAME",
    "password": "PASSWORD",
    "proxy": False,
    "proxies": {
        "http": "",
        "https": ""
    },
    "ua": "Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0 "
}


def path_to(*file):
    return os.path.abspath(os.path.join(os.path.dirname(__file__), *file))


def rng(t):
    return range(50, -(-t // 50) * 50, 50)


PATTERNS = (r'(\d{1,3})\s<span',
            r'bold"\shref="(viewtopic\.php\?t=\d+)">(.+?)</a.+?(dl\.php\?t=\d+)'
            r'">(.+?)\s&.+?data-ts_text="(.+?)">.+?Личи">(\d+)</.+?data-ts_'
            r'text="(\d+)"', '%s/tracker.php?nm=%s&c=%s', "%s&start=%s")

FILENAME = __file__[__file__.rfind('/') + 1:-3]
FILE_J, FILE_C = [path_to(FILENAME + fl) for fl in ['.json', '.cookie']]
# base64 encoded image
ICON = ("AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAAAABMLAAATCw"
"AAAAAAAAAAAAAAAAAAAAAAAAAAAABs3wUAY8wFBGPMBQN2sw8A9kA6AOdOOl/nTjo/5046"
"AOdOOgDnTjoA5046AOdOOgHnTjoAAAAAAAAAAAB28wUAY8wFAGPMBWBjzAVWXtEHAMdsKg"
"DnTjqf50464+dOOmnnTjoh5046JudOOmLnTjp85046DAAAAAAAAAAAbN8FAGPMBQxjzAXA"
"Y8wF1WPMBSNX2AAA9z86nehNOv/nTjr750464+dOOubnTjr/5046oedOOgMAAAAAdfEFAG"
"PMBQBjzAVPY8wF82PMBf9jzAW0XdEHOt5XNnbhVDSm6U04v+dOOvvnTjr/5046/edOOl3n"
"TjoAbN8FDWPMBSljzAVpY8wF3GPMBf9jzAX/Y8wF/2PMBe5Y1wXYS+MAyY2kHHvwRjvr50"
"46/+dOOvnnTjpK5046AGPMBZRjzAXpY8wF/WPMBf9jzAX/Y8wF/2PNBP9jzAX/YswF/1rU"
"Aa/qSzat5046/udOOv/nTjr/5046iudOOgJjzAUsY8wFq2PMBfxjzAX/Y8wF/2LFDsNfvx"
"afY90AzVjhAM/WXy6U6E07+OdOOv/nTjr/5046/+dOOuznTjpbY8wFAGPMBRJjzAWxY8wF"
"/2PNA/5cojyQRQD/t0kn36dejFVk+Ek4wedOOv/nTjr/6E447edOOsznTjrI5046pmzfBQ"
"BjzAUAY8wFWWPMBf1jzAX/YtgAu0cc7LhGI+T/Nxb+su9LM6zoTjn/8U4v1bBAc2i/R1MT"
"/1oLC/dOKgwAAAAAbN8FAGPMBUxjzAX6Y8wF+WPmAK5JKdyiRiPj/zgj8euqPnOP/08e4p"
"o6iosuI/zSNyTydS0j/A41JPUAAAAAAG7iBQBjzAVVY8wF2GPkAGFVfHYhRhrvwkYk4v9F"
"JOP/WCvPn89BU3w3JfHHRiTi/0Yk4vtGJOKgRiTiEAAAAAB39QUAbeEFHGrsACdGItcBRh"
"fzdUYk4vtGJOL/RiTi/0Yk4vA6JO7dRiTi/UYk4t1GJOKNRiTiQk0k+AcAAAAAAAAAAAAA"
"AABGF/8ARiTiGkYk4rRGJOLMRiTiz0Yk4vNGJOL/RiTi/0Yk4tNGJOIxRiTiAFMq/wAAAA"
"AAAAAAAAAAAAAAAAAAVCv/AE0k+gRNJPoRTST4DkYk4hFGJOJRRiTi3UYk4v9GJOJyRiTi"
"AFMq/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABTKv8ARiTiAEYk4l"
"ZGJOLgRiTiN00k+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
"AAAAAE0k+ABGJOIIRiTiT0Yk4g9NJPoAAAAAAAAAAAAAAAAA//8AAP//AAD/uwAA+/cAAP"
"H3AADgcwAA5+MAAO/PAAD23wAA/v8AAP53AAD+fwAA/58AAP/fAAD//wAA//8AAA==")
# setup logging
logging.basicConfig(
    format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
    datefmt="%m-%d %H:%M")

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

try:
    # try to load user data from file
    with open(FILE_J, 'r+') as f:
        config = json.load(f)
        # logger.debug("Config is loaded.")
except OSError as e:
    logger.error(e)
    # if file doesn't exist, we'll create it
    with open(FILE_J, 'w') as f:
        f.write(json.dumps(config, indent=4, sort_keys=False))
    # also write/rewrite ico file
    with open(path_to(FILENAME + '.ico'), 'wb') as f:
        f.write(base64.b64decode(ICON))
    logger.debug("Write files.")
class rutracker:
    name = 'Rutracker'
    url = 'https://rutracker.org/forum/'
    supported_categories = {'all': '-1'}
    # 'movies': '2',
    # 'tv': '3',
    # 'music': '4',
    # 'games': '5',
    # 'anime': '6',
    # 'software': '7'}

    def __init__(self):
        # error message
        self.error = None
        # establish connection
        self.session = build_opener()
        # add proxy handler if needed
        if config['proxy']:
            if any(config['proxies'].values()):
                self.session.add_handler(ProxyHandler(config['proxies']))
                logger.debug("Proxy is set!")
            else:
                self.error = "Proxy enabled, but not set!"
        # change user-agent
        self.session.addheaders.pop()
        self.session.addheaders.append(('User-Agent', config['ua']))
        # load local cookies
        mcj = MozillaCookieJar()
        try:
            mcj.load(FILE_C, ignore_discard=True)
            if 'bb_session' in [cookie.name for cookie in mcj]:
                # if cookie.expires < int(time.time())
                logger.info("Local cookies is loaded")
                self.session.add_handler(HTTPCookieProcessor(mcj))
            else:
                logger.info("Local cookies expired or bad")
                logger.debug(f"That we have: {[cookie for cookie in mcj]}")
                mcj.clear()
                self.login(mcj)
        except FileNotFoundError:
            self.login(mcj)

    def search(self, what, cat='all'):
        if self.error:
            self.pretty_error(what)
            return
        query = PATTERNS[2] % (self.url, what.replace(" ", "+"),
                               self.supported_categories[cat])

        # make the first request (it may be enough)
        t0, total = time.time(), self.searching(query, True)
        if self.error:
            self.pretty_error(what)
            return
        # do async requests
        if total > 50:
            qrs = [PATTERNS[3] % (query, x) for x in rng(total)]
            with ThreadPoolExecutor(len(qrs)) as executor:
                executor.map(self.searching, qrs, timeout=30)

        logger.debug(f"--- {time.time() - t0} seconds ---")
        logger.info(f"Found torrents: {total}")

    def download_torrent(self, url: str):
        # Download url
        response = self._catch_error_request(url)
        if self.error:
            self.pretty_error(url)
            return
        # Create a torrent file
        file, path = tempfile.mkstemp('.torrent')
        with os.fdopen(file, "wb") as fd:
            # Write it to a file
            fd.write(response.read())
        # return file path
        logger.debug(path + " " + url)
        print(path + " " + url)

    def login(self, mcj):
        if self.error:
            return
        # if we want to use https we must add ssl=enable_ssl to the cookie
        mcj.set_cookie(Cookie(0, 'ssl', "enable_ssl", None, False,
                              '.rutracker.org', True, False, '/', True,
                              False, None, 'ParserCookie', None, None, None))
        self.session.add_handler(HTTPCookieProcessor(mcj))
        form_data = {"login_username": config['username'],
                     "login_password": config['password'],
                     "login": "вход"}
        logger.debug(f"Login. Data before: {form_data}")
        # encode values to cp1251 first, then encode the whole string by default
        data_encoded = urlencode(
            {k: v.encode('cp1251') for k, v in form_data.items()}).encode()
        logger.debug(f"Login. Data after: {data_encoded}")
        self._catch_error_request(self.url + 'login.php', data_encoded)
        if self.error:
            return
        logger.debug(f"That we have: {[cookie for cookie in mcj]}")
        if 'bb_session' in [cookie.name for cookie in mcj]:
            mcj.save(FILE_C, ignore_discard=True, ignore_expires=True)
            logger.info("We successfully authorized")
        else:
            self.error = "We not authorized, please check your credentials!"
            logger.warning(self.error)

    def searching(self, query, first=False):
        response = self._catch_error_request(query)
        if not response:
            return None
        page = response.read().decode('cp1251')
        self.draw(page)

        return int(re.search(PATTERNS[0], page)[1]) if first else -1

    def draw(self, html: str):
        torrents = re.findall(PATTERNS[1], html, re.S)
        for tor in torrents:
            local = time.strftime("%y.%m.%d", time.localtime(int(tor[6])))
            torrent_date = f"[{local}] " if config['torrentDate'] else ""

            prettyPrinter({
                "engine_url": self.url,
                "desc_link": self.url + tor[0],
                "name": torrent_date + unescape(tor[1]),
                "link": self.url + tor[2],
                "size": unescape(tor[3]),
                "seeds": tor[4] if tor[4].isdigit() else '0',
                "leech": tor[5]
            })
        del torrents

    def _catch_error_request(self, url='', data=None, retrieve=False):
        url = url or self.url
        try:
            response = self.session.open(url, data, 5)
            # check that the tracker isn't blocked
            if self.url not in response.geturl():
                raise URLError(f"{self.url} is blocked. Try another proxy.")
        except (socket.error, socket.timeout) as err:
            if not retrieve:
                return self._catch_error_request(url, data, True)
            logger.error(err)
            self.error = f"{self.url} is not response! Maybe it is blocked."
            if "no host given" in err.args:
                self.error = "Proxy is bad, try another!"
        except (URLError, HTTPError) as err:
            logger.error(err.reason)
            self.error = err.reason
            if hasattr(err, 'code'):
                self.error = f"Request to {url} failed with status: {err.code}"
        else:
            return response

        return None

    def pretty_error(self, what):
        prettyPrinter({"engine_url": self.url,
                       "desc_link": "https://github.com/imDMG/qBt_SE",
                       "name": f"[{unquote(what)}][Error]: {self.error}",
                       "link": self.url + "error",
                       "size": "1 TB",  # lol
                       "seeds": 100,
                       "leech": 100})

        self.error = None


if __name__ == "__main__":
    engine = rutracker()
    engine.search('doctor')
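
The rng() helper above (shared with nnmclub.py) turns the total number of results into the start offsets of the follow-up pages via ceiling division; a small worked example:

def rng(t):
    # -(-t // 50) is ceiling division: the number of 50-row result pages
    return range(50, -(-t // 50) * 50, 50)


print(list(rng(120)))  # [50, 100]: offsets for pages 2 and 3 of 120 results
print(list(rng(50)))   # []: a single page needs no extra requests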