Added multilingual wikipedia search capability.
parent 6c00d59973
commit ccd0343d13
@@ -10,9 +10,9 @@ http://inamidst.com/phenny/
 import re, urllib
 import web
 
-wikiuri = 'http://en.wikipedia.org/wiki/%s'
-wikisearch = 'http://en.wikipedia.org/wiki/Special:Search?' \
-   + 'search=%s&fulltext=Search'
+wikiuri = 'http://%s.wikipedia.org/wiki/%s'
+# wikisearch = 'http://%s.wikipedia.org/wiki/Special:Search?' \
+#    + 'search=%s&fulltext=Search'
 
 r_tr = re.compile(r'(?ims)<tr[^>]*>.*?</tr>')
 r_paragraph = re.compile(r'(?ims)<p[^>]*>.*?</p>|<li(?!n)[^>]*>.*?</li>')
@@ -59,30 +59,30 @@ def search(term):
       return uri[len('http://en.wikipedia.org/wiki/'):]
    else: return term
 
-def wikipedia(term, last=False):
+def wikipedia(term, language='en', last=False):
    global wikiuri
    if not '%' in term:
       if isinstance(term, unicode):
          t = term.encode('utf-8')
       else: t = term
       q = urllib.quote(t)
-      u = wikiuri % q
+      u = wikiuri % (language, q)
       bytes = web.get(u)
-   else: bytes = web.get(wikiuri % term)
+   else: bytes = web.get(wikiuri % (language, term))
    bytes = r_tr.sub('', bytes)
 
    if not last:
       r = r_redirect.search(bytes[:4096])
       if r:
          term = urllib.unquote(r.group(1))
-         return wikipedia(term, last=True)
+         return wikipedia(term, language=language, last=True)
 
    paragraphs = r_paragraph.findall(bytes)
 
    if not paragraphs:
       if not last:
          term = search(term)
-         return wikipedia(term, last=True)
+         return wikipedia(term, language=language, last=True)
       return None
 
    # Pre-process
@@ -115,7 +115,7 @@ def wikipedia(term, last=False):
    if not m:
       if not last:
          term = search(term)
-         return wikipedia(term, last=True)
+         return wikipedia(term, language=language, last=True)
       return None
    sentence = m.group(0)
 
@@ -130,14 +130,14 @@ def wikipedia(term, last=False):
        or ('or add a request for it' in sentence)):
       if not last:
          term = search(term)
-         return wikipedia(term, last=True)
+         return wikipedia(term, language=language, last=True)
       return None
 
    sentence = '"' + sentence.replace('"', "'") + '"'
    sentence = sentence.decode('utf-8').encode('utf-8')
    wikiuri = wikiuri.decode('utf-8').encode('utf-8')
    term = term.decode('utf-8').encode('utf-8')
-   return sentence + ' - ' + (wikiuri % term)
+   return sentence + ' - ' + (wikiuri % (language, term))
 
 def wik(phenny, input):
    origterm = input.groups()[1]
@@ -146,12 +146,19 @@ def wik(phenny, input):
       origterm = origterm.encode('utf-8')
 
    term = urllib.unquote(origterm)
+   language = 'en'
+   if term.startswith(':') and (' ' in term):
+      a, b = term.split(' ', 1)
+      a = a.lstrip(':')
+      if a.isalpha():
+         language, term = a, b
    term = term[0].upper() + term[1:]
    term = term.replace(' ', '_')
 
-   try: result = wikipedia(term)
+   try: result = wikipedia(term, language)
    except IOError:
-      error = "Can't connect to en.wikipedia.org (%s)" % (wikiuri % term)
+      args = (language, wikiuri % (language, term))
+      error = "Can't connect to %s.wikipedia.org (%s)" % args
       return phenny.say(error)
 
    if result is not None:
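For reference, the ':lang' prefix handling added to wik() above can be exercised on its own. The sketch below lifts the new parsing lines out of the handler into a standalone helper (parse_wik_args is a hypothetical name, not part of the module) to show how '.wik :de Berlin' ends up on de.wikipedia.org while a bare query keeps the English default:

# Minimal sketch of the new language-prefix parsing. The ':lang term'
# convention and the wikiuri template come from the diff above; the
# helper itself is illustrative only.
wikiuri = 'http://%s.wikipedia.org/wiki/%s'

def parse_wik_args(term, default='en'):
   # Peel an optional ':lang ' prefix off the query, as wik() now does.
   language = default
   if term.startswith(':') and (' ' in term):
      a, b = term.split(' ', 1)
      a = a.lstrip(':')
      if a.isalpha():
         language, term = a, b
   # Same normalisation as the existing handler: capitalise, underscore.
   term = term[0].upper() + term[1:]
   term = term.replace(' ', '_')
   return language, term

print(wikiuri % parse_wik_args(':de Berlin'))  # http://de.wikipedia.org/wiki/Berlin
print(wikiuri % parse_wik_args('Berlin'))      # http://en.wikipedia.org/wiki/Berlin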