
- Fixed torrentreactor search engine

adaptive-webui-19844
Christophe Dumez, 17 years ago
Parent commit: 9540d45c1d

Changed files:
  1. src/search_engine/engines/torrentreactor.py (10 changes)
  2. src/search_engine/engines/versions.txt (2 changes)

src/search_engine/engines/torrentreactor.py

@@ -1,4 +1,4 @@
-#VERSION: 1.01
+#VERSION: 1.02
 #AUTHORS: Gekko Dam Beer (gekko04@users.sourceforge.net)
 from novaprinter import prettyPrinter
 import sgmllib
@@ -22,9 +22,7 @@ class torrentreactor(object):
             if params['href'].startswith('http://dl.torrentreactor.net/download.php'):
                 self.current_item = {}
                 self.td_counter = 0
-                equal = params['href'].find("=")
-                amp = params['href'].find("&", equal+1)
-                self.id = str(int(params['href'][equal+1:amp]))
+                self.current_item['link'] = params['href'].strip()

         def handle_data(self, data):
             if self.td_counter == 0:
@@ -47,11 +45,10 @@ class torrentreactor(object):
         def start_td(self,attr):
             if isinstance(self.td_counter,int):
                 self.td_counter += 1
-                if self.td_counter > 7:
+                if self.td_counter > 3:
                     self.td_counter = None
                     # add item to results
                     if self.current_item:
-                        self.current_item['link']='http://download.torrentreactor.net/download.php?id=%s&name=%s'%(self.id, urllib.quote(self.current_item['name']))
                         self.current_item['engine_url'] = self.url
                         if not self.current_item['seeds'].isdigit():
                             self.current_item['seeds'] = 0
@@ -71,6 +68,7 @@ class torrentreactor(object):
             results = []
             parser = self.SimpleSGMLParser(results, self.url)
             dat = urllib.urlopen(self.url+'/search.php?search=&words=%s&cid=&sid=&type=2&orderby=a.seeds&asc=0&skip=%s'%(what,(i*35))).read().decode('utf-8', 'replace')
+            #print "loading page: "+self.url+'/search.php?search=&words=%s&cid=&sid=&type=2&orderby=a.seeds&asc=0&skip=%s'%(what,(i*35))
             parser.feed(dat)
             parser.close()
             if len(results) <= 0:
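
In short, the 1.02 fix stops rebuilding the download link from a parsed-out numeric id: start_a now stores the matched href directly in current_item['link'], and the old id extraction plus the link reconstruction in start_td are removed (the td_counter cutoff also drops from 7 to 3, presumably to follow a change in the site's results table). Below is a minimal standalone sketch of the old and new link handling in the plugin's Python 2 style; the example href and name are made up for illustration and are not taken from the commit.

    import urllib

    # Hypothetical download href as it might appear in TorrentReactor results.
    href = 'http://dl.torrentreactor.net/download.php?id=1234567&name=Some%20Torrent'
    name = 'Some Torrent'

    # 1.01 behaviour: extract the numeric id from the href...
    equal = href.find("=")
    amp = href.find("&", equal+1)
    torrent_id = str(int(href[equal+1:amp]))
    # ...and rebuild the link later, in start_td, from the id and the quoted name.
    old_link = 'http://download.torrentreactor.net/download.php?id=%s&name=%s' % (torrent_id, urllib.quote(name))

    # 1.02 behaviour: keep the href found on the page as the result link.
    new_link = href.strip()

    print old_link
    print new_link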

src/search_engine/engines/versions.txt

@@ -1,5 +1,5 @@
 isohunt: 1.01
-torrentreactor: 1.01
+torrentreactor: 1.02
 btjunkie: 1.11
 mininova: 1.12
 piratebay: 1.01
