add another mylife module
parent
4d8dfcbcf9
commit
8d54baa901
|
@ -11,58 +11,74 @@ from urllib2 import urlopen, HTTPError
|
||||||
import lxml.html
|
import lxml.html
|
||||||
|
|
||||||
def fml(phenny, input):
    """.fml"""
    # Fetch a random FML story; if the site is unreachable, apologise and bail.
    try:
        req = urlopen("http://www.fmylife.com/random")
    except HTTPError:
        # Bug fix: the original statement was missing its closing parenthesis,
        # which made the whole module a syntax error.
        phenny.say("I tried to use .fml, but it was broken. FML")
        return

    # The quote is the text of the first child of the first 'article' element.
    doc = lxml.html.parse(req)
    quote = doc.getroot().find_class('article')[0][0].text_content()
    phenny.say(quote)
fml.commands = ['fml']
def mlia(phenny, input):
    """.mlia - My life is average."""
    # Grab the front page; report failure in-channel rather than crashing.
    try:
        page = urlopen("http://mylifeisaverage.com/")
    except HTTPError:
        phenny.say("I tried to use .mlia, but it wasn't loading. MLIA")
        return

    # First child of the first element with class 'story' carries the text.
    tree = lxml.html.parse(page)
    story = tree.getroot().find_class('story')[0][0]
    phenny.say(story.text_content().strip())
mlia.commands = ['mlia']
def mliarab(phenny, input):
    """.mliarab - My life is Arabic."""
    # Fetch a random story; on HTTP failure, deliver the joke error message.
    try:
        page = urlopen("http://mylifeisarabic.com/random/")
    except HTTPError:
        phenny.say("The site you requested, mylifeisarabic.com, has been banned in the UAE. You will be reported to appropriate authorities")
        return

    # The quote lives in the first child of the first 'entry' element.
    tree = lxml.html.parse(page)
    entry = tree.getroot().find_class('entry')[0][0]
    phenny.say(entry.text_content().strip())
mliarab.commands = ['mliarab']
def mlih(phenny, input):
    """.mlih - My life is ho."""
    # Pull a random story; announce the (in-character) failure message on error.
    try:
        page = urlopen("http://mylifeisho.com/random")
    except HTTPError:
        phenny.say("MLIH is giving some dome to some lax bros.")
        return

    # Quote text sits in the first child of the first 'storycontent' element.
    tree = lxml.html.parse(page)
    story = tree.getroot().find_class('storycontent')[0][0]
    phenny.say(story.text_content())
mlih.commands = ['mlih']
def mlib(phenny, input):
    """.mlib"""
    # Request a random story; on HTTP failure, post the canned excuse and stop.
    try:
        page = urlopen("http://mylifeisbro.com/random")
    except HTTPError:
        phenny.say("MLIB is out getting a case of Natty. It's chill.")
        return

    # Quote text sits in the first child of the first 'storycontent' element.
    tree = lxml.html.parse(page)
    story = tree.getroot().find_class('storycontent')[0][0]
    phenny.say(story.text_content())
mlib.commands = ['mlib']
if __name__ == '__main__':
    # Parenthesized single-argument form: identical output under Python 2's
    # print statement, and forward-compatible syntax.
    print(__doc__.strip())
Loading…
Reference in New Issue