Browse Source

- Updated search engine plugins to limit searches to 10 result pages

adaptive-webui-19844
Christophe Dumez 17 years ago
parent
commit
731438a19a
  1. 6
      src/search_engine/engines/btjunkie.py
  2. 6
      src/search_engine/engines/isohunt.py
  3. 4
      src/search_engine/engines/mininova.py
  4. 6
      src/search_engine/engines/piratebay.py
  5. 6
      src/search_engine/engines/torrentreactor.py
  6. 10
      src/search_engine/engines/versions.txt

6
src/search_engine/engines/btjunkie.py

@@ -1,4 +1,4 @@
#VERSION: 1.10
#VERSION: 1.11
#AUTHORS: Fabien Devaux (fab@gnux.info)
from novaprinter import prettyPrinter
import urllib
@@ -10,7 +10,7 @@ class btjunkie(object):
def search(self, what):
i = 1
while True:
while True and i<11:
res = 0
dat = urllib.urlopen(self.url+'/search?q=%s&o=52&p=%d'%(what,i)).read().decode('utf8', 'replace')
# I know it's not very readable, but the SGML parser feels in pain
@@ -32,4 +32,4 @@ class btjunkie(object):
res = res + 1
if res == 0:
break
i = i + 1
i = i + 1

6
src/search_engine/engines/isohunt.py

@@ -1,4 +1,4 @@
#VERSION: 1.00
#VERSION: 1.01
#AUTHORS: Gekko Dam Beer (gekko04@users.sourceforge.net)
from novaprinter import prettyPrinter
import sgmllib
@@ -67,7 +67,7 @@ class isohunt(object):
def search(self, what):
i = 1
while True:
while True and i<11:
results = []
parser = self.SimpleSGMLParser(results, self.url)
dat = urllib.urlopen(self.url+'/torrents.php?ihq=%s&ihp=%s'%(what,i)).read().decode('utf-8', 'replace')
@@ -75,4 +75,4 @@ class isohunt(object):
parser.close()
if len(results) <= 0:
break
i += 1
i += 1

4
src/search_engine/engines/mininova.py

@@ -1,4 +1,4 @@
#VERSION: 1.11
#VERSION: 1.12
#AUTHORS: Fabien Devaux (fab@gnux.info)
from novaprinter import prettyPrinter
import urllib
@@ -29,7 +29,7 @@ class mininova(object):
else:
return ''.join([ get_text(n) for n in txt.childNodes])
page = 1
while True:
while True and page<11:
res = 0
dat = urllib.urlopen(self.url+'/search/%s/seeds/%d'%(what, page)).read().decode('utf-8', 'replace')
dat = re.sub("<a href=\"http://www.boardreader.com/index.php.*\"", "<a href=\"plop\"", dat)

6
src/search_engine/engines/piratebay.py

@@ -1,4 +1,4 @@
#VERSION: 1.00
#VERSION: 1.01
#AUTHORS: Fabien Devaux (fab@gnux.info)
from novaprinter import prettyPrinter
import sgmllib
@@ -68,7 +68,7 @@ class piratebay(object):
ret = []
i = 0
order = 'se'
while True:
while True and i<11:
results = []
parser = self.SimpleSGMLParser(results, self.url)
dat = urllib.urlopen(self.url+'/search/%s/%u/0/0' % (what, i)).read()
@@ -76,4 +76,4 @@ class piratebay(object):
parser.close()
if len(results) <= 0:
break
i += 1
i += 1

6
src/search_engine/engines/torrentreactor.py

@@ -1,4 +1,4 @@
#VERSION: 1.00
#VERSION: 1.01
#AUTHORS: Gekko Dam Beer (gekko04@users.sourceforge.net)
from novaprinter import prettyPrinter
import sgmllib
@@ -67,7 +67,7 @@ class torrentreactor(object):
def search(self, what):
i = 0
while True:
while True and i<11:
results = []
parser = self.SimpleSGMLParser(results, self.url)
dat = urllib.urlopen(self.url+'/search.php?search=&words=%s&cid=&sid=&type=2&orderby=a.seeds&asc=0&skip=%s'%(what,(i*35))).read().decode('utf-8', 'replace')
@@ -75,4 +75,4 @@ class torrentreactor(object):
parser.close()
if len(results) <= 0:
break
i += 1
i += 1

10
src/search_engine/engines/versions.txt

@@ -1,5 +1,5 @@
isohunt: 1.00
torrentreactor: 1.00
btjunkie: 1.10
mininova: 1.11
piratebay: 1.00
isohunt: 1.01
torrentreactor: 1.01
btjunkie: 1.11
mininova: 1.12
piratebay: 1.01

Loading…
Cancel
Save