qBt_SE/proxychecker.py

81 lines
2.1 KiB
Python
Raw Normal View History

2020-09-20 23:07:37 +05:00
import asyncio
import functools
import time
import threading
2020-05-07 21:56:58 +05:00
from concurrent.futures.thread import ThreadPoolExecutor
from urllib.request import build_opener, ProxyHandler
2022-03-17 00:16:56 +05:00
# Target site used to probe each proxy; a working proxy must fetch this
# URL without being redirected off-host.
HOST = "http://rutor.info"
# Proxy scheme key for ProxyHandler ("http" here): the URL prefix before "://".
SCHEME = HOST[:HOST.rfind("://")]
# Source of proxy addresses: a local file or (see main) an http(s) URL.
PROXY_FILE = "proxylist.txt"  # one address per line
2020-05-07 21:56:58 +05:00
2022-03-17 00:16:56 +05:00
async def is_good_proxy_aio(proxy: str) -> bool:
    """Asynchronously check *proxy* by fetching HOST through it.

    Returns True (and prints the address) when the request succeeds and the
    final URL is still on HOST; returns False on any network error or when
    the proxy redirects elsewhere.

    Fixes vs. original: the blocking urllib call was executed directly in
    the coroutine (the run_in_executor line was commented out), stalling the
    event loop; and the off-host check raised a bare Exception, which is not
    an OSError and therefore escaped the handler below.
    """
    try:
        # Brief stagger so many concurrent checks don't fire at one instant.
        await asyncio.sleep(.1)
        opener = build_opener(ProxyHandler({SCHEME: proxy}))
        opener.addheaders = [("User-agent", "Mozilla/5.0")]
        _open = functools.partial(opener.open, HOST, timeout=3)
        # urllib is blocking: run it on the default executor so the event
        # loop stays responsive while other checks are in flight.
        loop = asyncio.get_running_loop()
        response = await loop.run_in_executor(None, _open)
        with response:
            if not response.geturl().startswith(HOST):
                # Redirect to another site means the proxy is hijacking
                # traffic; report it as bad via the OSError path.
                raise OSError("redirected off-host")
    except OSError:
        return False
    print(proxy)
    return True
async def run_aio(proxies) -> None:
    """Check all proxies concurrently via the asyncio variant.

    Bug fix: the original gathered the return values of the *synchronous*
    is_good_proxy() — plain bools — and asyncio.gather() raises TypeError
    on non-awaitables.  Gather the coroutine objects instead.
    """
    await asyncio.gather(*(is_good_proxy_aio(proxy) for proxy in proxies))
2020-09-20 23:07:37 +05:00
2022-03-17 00:16:56 +05:00
def run_thread(proxies: list) -> None:
    """Check every proxy on its own thread and block until all finish."""
    workers = [
        threading.Thread(target=is_good_proxy, args=(address,))
        for address in proxies
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
2022-03-17 00:16:56 +05:00
def run_pool(proxies: list) -> None:
    """Check all proxies on a shared thread pool.

    Fixes vs. original: ThreadPoolExecutor(0) raises ValueError for an
    empty list, so bail out early; cap the worker count so a huge list
    doesn't spawn thousands of threads; drop the dead ``timeout=3`` (it
    only applies when the map() iterator is consumed, which it never is —
    the with-block still waits for all submitted tasks on exit).
    """
    if not proxies:
        return
    with ThreadPoolExecutor(min(len(proxies), 32)) as executor:
        executor.map(is_good_proxy, proxies)
2020-05-07 21:56:58 +05:00
2020-09-20 23:07:37 +05:00
2022-03-17 00:16:56 +05:00
def is_good_proxy(proxy: str) -> bool:
    """Synchronously check *proxy* by fetching HOST through it.

    Returns True (and prints the address) when the fetch succeeds and the
    final URL is still on HOST; False on any network error or when the
    proxy redirects elsewhere.
    """
    try:
        opener = build_opener(ProxyHandler({SCHEME: proxy}))
        opener.addheaders = [("User-agent", "Mozilla/5.0")]
        with opener.open(HOST, timeout=3) as response:
            if not response.geturl().startswith(HOST):
                # Off-host redirect means the proxy is hijacking traffic.
                # Bug fix: the original raised a bare Exception here, which
                # is NOT an OSError, so it escaped the handler below and
                # crashed the caller instead of reporting a bad proxy.
                raise OSError("redirected off-host")
    except OSError:
        return False
    print(proxy)
    return True
2020-05-07 21:56:58 +05:00
def main():
    """Load the proxy list, check every entry, and print elapsed time."""
    started = time.time()
    if PROXY_FILE.startswith("http"):
        # Remote list: fetch it over HTTP; lines arrive as bytes.
        with build_opener().open(PROXY_FILE) as response:
            proxy_list = [line.rstrip().decode("utf-8") for line in response]
    else:
        # Local file, one address per line.
        with open(PROXY_FILE) as handle:
            proxy_list = [line.rstrip() for line in handle]

    print("Working proxies:")
    # run_thread(proxy_list)
    run_pool(proxy_list)
    # asyncio.run(run_aio(proxy_list))
    print(time.time() - started)


if __name__ == '__main__':
    main()