use global GrumbleError for web exceptions

branch master
author mutantmonkey 2012-06-01 22:17:09 -07:00
parent 28cde318a8
commit 9add0985ec
16 changed files with 85 additions and 81 deletions
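
Every changed module follows the same pattern: instead of replying with an error string and returning early, it now raises GrumbleError, which tools.py defines as a plain Exception subclass. Below is a minimal sketch of the convention; the bot-side handling is an assumption (it is not part of this diff), and the helper names fetch and run_command are hypothetical:

```python
from urllib.error import HTTPError
from tools import GrumbleError
import web

# module side (what this commit changes): raise instead of reply-and-return
def fetch(url):
    try:
        return web.get(url)
    except (HTTPError, IOError, ValueError):
        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

# bot side (assumed, not shown in this diff): one place catches GrumbleError
# and reports its message back to the channel
def run_command(func, phenny, input):
    try:
        func(phenny, input)
    except GrumbleError as e:
        phenny.reply(str(e))
```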

View File

@@ -6,6 +6,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
from urllib.error import HTTPError
import web
from tools import GrumbleError
def commit(phenny, input):
""".commit - Get a What the Commit commit message."""
@@ -13,8 +14,8 @@ def commit(phenny, input):
try:
msg = web.get("http://whatthecommit.com/index.txt")
except (HTTPError, IOError, ValueError):
phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
phenny.reply(msg)
commit.commands = ['commit']

View File

@@ -5,6 +5,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
"""
from urllib.error import HTTPError
from tools import GrumbleError
import web
import json
@@ -20,8 +21,7 @@ def fcc(phenny, input):
req = web.get("http://callook.info/{0}/json".format(web.quote(callsign)))
data = json.loads(req)
except (HTTPError, IOError, ValueError):
phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
if len(data) <= 0 or data['status'] == 'INVALID':
phenny.reply('No results found for {0}'.format(callsign))

View File

@@ -4,6 +4,7 @@ hs.py - hokie stalker module
author: mutantmonkey <mutantmonkey@mutantmonkey.in>
"""
from tools import GrumbleError
import web
import lxml.etree
@@ -18,8 +19,7 @@ def search(query):
try:
req = web.get(SEARCH_URL.format(query))
except (HTTPError, IOError):
phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
xml = lxml.etree.fromstring(req.encode('utf-8'))
results = xml.findall('{0}searchResponse/{0}searchResultEntry'.format(NS))

View File

@@ -25,8 +25,8 @@ def imdb(phenny, input):
if not query: return phenny.reply('.imdb what?')
m = imdb_search(query)
try:
phenny.reply('{0} ({1}): {2} http://imdb.com/title/{3}'.format(m['Title'], m['Year'], m['Plot'], m['imdbID']))
try:
phenny.reply('{0} ({1}): {2} http://imdb.com/title/{3}'.format(m['Title'], m['Year'], m['Plot'], m['imdbID']))
except:
phenny.reply("No results found for '%s'." % query)
phenny.reply("No results found for '%s'." % query)
imdb.commands = ['imdb']

View File

@@ -5,6 +5,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>, andreim <andreim@andreim.ne
"""
from urllib.error import HTTPError
from tools import GrumbleError
import web
import json
@@ -19,8 +20,7 @@ def linx(phenny, input):
try:
req = web.post("http://linx.li/vtluug", {'url': url})
except (HTTPError, IOError):
phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
data = json.loads(req)
if len(data) <= 0 or not data['success']:
@@ -56,8 +56,7 @@ def lines(phenny, input):
try:
req = web.post("http://linx.li/vtluuglines", {'nickname': nickname, 'date': date, 'sender': input.nick})
except (HTTPError, IOError):
phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
phenny.reply(req)

View File

@@ -5,32 +5,31 @@ author: Ramblurr <unnamedrambler@gmail.com>
author: mutantmonkey <mutantmonkey@mutantmonkey.in>
"""
import random
from urllib.error import HTTPError
from tools import GrumbleError
import web
import lxml.html
def fml(phenny, input):
""".fml"""
try:
req = web.get("http://www.fmylife.com/random")
except (HTTPError, IOError):
phenny.say("I tried to use .fml, but it was broken. FML")
return
raise GrumbleError("I tried to use .fml, but it was broken. FML")
doc = lxml.html.fromstring(req)
quote = doc.find_class('article')[0][0].text_content()
phenny.say(quote)
fml.commands = ['fml']
def mlia(phenny, input):
""".mlia - My life is average."""
try:
req = web.get("http://mylifeisaverage.com/")
except (HTTPError, IOError):
phenny.say("I tried to use .mlia, but it wasn't loading. MLIA")
return
raise GrumbleError("I tried to use .mlia, but it wasn't loading. MLIA")
doc = lxml.html.fromstring(req)
quote = doc.find_class('story')[0][0].text_content()
@@ -38,70 +37,71 @@ def mlia(phenny, input):
phenny.say(quote)
mlia.commands = ['mlia']
def mlib(phenny, input):
""".mlib - My life is bro."""
try:
req = web.get("http://mylifeisbro.com/random")
except (HTTPError, IOError):
phenny.say("MLIB is out getting a case of Natty. It's chill.")
return
raise GrumbleError("MLIB is out getting a case of Natty. It's chill.")
doc = lxml.html.fromstring(req)
quote = doc.find_class('storycontent')[0][0].text_content()
phenny.say(quote)
mlib.commands = ['mlib']
def mlig(phenny, input):
""".mlig - My life is ginger."""
try:
req = web.get("http://www.mylifeisginger.org/random")
except (HTTPError, IOError):
phenny.say("Busy eating your soul. Be back soon.")
return
raise GrumbleError("Busy eating your soul. Be back soon.")
doc = lxml.html.fromstring(req)
quote = doc.find_class('oldlink')[0].text_content()
phenny.say(quote)
mlig.commands = ['mlig']
def mlih(phenny, input):
""".mlih - My life is ho."""
try:
req = web.get("http://mylifeisho.com/random")
except (HTTPError, IOError):
phenny.say("MLIH is giving some dome to some lax bros.")
return
raise GrumbleError("MLIH is giving some dome to some lax bros.")
doc = lxml.html.fromstring(req)
quote = doc.find_class('storycontent')[0][0].text_content()
phenny.say(quote)
mlih.commands = ['mlih']
def mlihp(phenny, input):
""".mlihp - My life is Harry Potter."""
try:
req = web.get("http://www.mylifeishp.com/random")
except (HTTPError, IOError):
phenny.say("This service is not available to Muggles.")
return
raise GrumbleError("This service is not available to Muggles.")
doc = lxml.html.fromstring(req)
quote = doc.find_class('oldlink')[0].text_content()
phenny.say(quote)
mlihp.commands = ['mlihp']
def mlit(phenny, input):
""".mlit - My life is Twilight."""
try:
req = web.get("http://mylifeistwilight.com/random")
except (HTTPError, IOError):
phenny.say("Error: Your life is too Twilight. Go outside.")
return
raise GrumbleError("Error: Your life is too Twilight. Go outside.")
doc = lxml.html.fromstring(req)
quote = doc.find_class('fmllink')[0].text_content()
phenny.say(quote)
mlit.commands = ['mlit']
if __name__ == '__main__':
print(__doc__.strip())

View File

@@ -7,6 +7,7 @@ author: telnoratti <calvin@winkowski.me>
from urllib.error import HTTPError
from urllib import request
from tools import GrumbleError
import web
import json
@@ -24,9 +25,8 @@ def xss(phenny, input):
try:
url = urlshortener(url)
except (HTTPError, IOError):
phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
phenny.reply(url)
xss.rule = (['xss'], r'(.*)')

View File

@@ -7,45 +7,46 @@ author: andreim <andreim@andreim.net>
import web
import re
import json
from tools import GrumbleError
from random import choice
def randomreddit(phenny, input):
subreddit = input.group(2)
if not subreddit:
phenny.say(".random <subreddit> - get a random link from the subreddit's frontpage")
return
if not re.match('^[A-Za-z0-9_-]*$',subreddit):
phenny.say(input.nick + ": bad subreddit format.")
return
subreddit = input.group(2)
if not subreddit:
phenny.say(".random <subreddit> - get a random link from the subreddit's frontpage")
return
if not re.match('^[A-Za-z0-9_-]*$',subreddit):
phenny.say(input.nick + ": bad subreddit format.")
return
url = "http://www.reddit.com/r/" + subreddit + "/.json"
try:
resp = web.get(url)
except:
try:
resp = web.get(url)
except:
try:
resp = web.get(url)
except:
phenny.reply('Reddit or subreddit unreachable.')
return
reddit = json.loads(resp)
post = choice(reddit['data']['children'])
url = "http://www.reddit.com/r/" + subreddit + "/.json"
try:
resp = web.get(url)
except:
try:
resp = web.get(url)
except:
try:
resp = web.get(url)
except:
raise GrumbleError('Reddit or subreddit unreachable.')
reddit = json.loads(resp)
post = choice(reddit['data']['children'])
nsfw = False
if post['data']['over_18']:
nsfw = True
if nsfw:
phenny.reply("!!NSFW!! " + post['data']['url'] + " (" + post['data']['title'] + ") !!NSFW!!")
else:
phenny.reply(post['data']['url'] + " (" + post['data']['title'] + ")")
nsfw = False
if post['data']['over_18']:
nsfw = True
if nsfw:
phenny.reply("!!NSFW!! " + post['data']['url'] + " (" + post['data']['title'] + ") !!NSFW!!")
else:
phenny.reply(post['data']['url'] + " (" + post['data']['title'] + ")")
randomreddit.commands = ['random']
randomreddit.priority = 'medium'
randomreddit.thread = False
randomreddit.thread = False

View File

@@ -6,6 +6,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
from urllib.parse import quote as urlquote
from urllib.error import HTTPError
from tools import GrumbleError
import web
import lxml.html
@@ -20,8 +21,7 @@ def rule34(phenny, input):
try:
req = web.get("http://rule34.xxx/index.php?page=post&s=list&tags={0}".format(urlquote(q)))
except (HTTPError, IOError):
phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
doc = lxml.html.fromstring(req)
doc.make_links_absolute('http://rule34.xxx/')
@@ -33,8 +33,7 @@ def rule34(phenny, input):
try:
link = thumbs[0].find('a').attrib['href']
except AttributeError:
phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
response = '!!NSFW!! -> {0} <- !!NSFW!!'.format(link)
phenny.reply(response)

View File

@@ -5,6 +5,7 @@ author: andreim <andreim@andreim.net>
"""
from urllib.error import HTTPError
from tools import GrumbleError
import web
import json
@@ -19,8 +20,7 @@ def short(phenny, input):
try:
req = web.post("http://vtlu.ug/vtluug", {'lurl': url})
except (HTTPError, IOError):
phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
phenny.reply(req)
short.rule = (['short'], r'(.*)')

View File

@@ -6,6 +6,7 @@ Copyright (c) 2011 Dafydd Crosby - http://www.dafyddcrosby.com
Licensed under the Eiffel Forum License 2.
"""
from tools import GrumbleError
import re
import web
@@ -29,8 +30,7 @@ def slogan(phenny, input):
slogan = remove_tags.sub('', slogan)
if not slogan:
phenny.say("Looks like an issue with sloganizer.net")
return
raise GrumbleError("Looks like an issue with sloganizer.net")
phenny.say(slogan)
slogan.commands = ['slogan']
slogan.example = '.slogan Granola'

View File

@@ -7,6 +7,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
from urllib.parse import quote as urlquote
from urllib.error import HTTPError
from tools import GrumbleError
import web
import lxml.html
import lxml.cssselect
@@ -31,8 +32,7 @@ def tfw(phenny, input, fahrenheit=False, celsius=False):
try:
req = web.get("http://thefuckingweather.com/?where={0}{1}".format(urlquote(where), celsius_param))
except (HTTPError, IOError):
phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
return
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
doc = lxml.html.fromstring(req)

View File

@@ -6,6 +6,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
import urllib.request
from urllib.error import HTTPError
from tools import GrumbleError
import web
import json
@@ -30,8 +31,8 @@ def urbandict(phenny, input):
data = req.read().decode('utf-8')
data = json.loads(data)
except (HTTPError, IOError, ValueError):
phenny.say("Urban Dictionary slemped out on me. Try again in a minute.")
return
raise GrumbleError(
"Urban Dictionary slemped out on me. Try again in a minute.")
if data['result_type'] == 'no_results':
phenny.say("No results found for {0}".format(word))

View File

@@ -9,7 +9,7 @@ http://inamidst.com/phenny/
import re, urllib.request, urllib.parse, urllib.error
import web
from tools import deprecated
from tools import deprecated, GrumbleError
r_from = re.compile(r'(?i)([+-]\d+):00 from')
@@ -29,9 +29,6 @@ def location(name):
lng = results['geonames'][0]['lng']
return name, countryName, lat, lng
class GrumbleError(object):
pass
def local(icao, hour, minute):
uri = ('http://www.flightstats.com/' +
'go/Airport/airportDetails.do?airportCode=%s')
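
Worth noting: the local GrumbleError that this hunk removes subclassed object rather than Exception, so it could not actually be raised under Python 3; importing the shared class from tools fixes that. A minimal, self-contained illustration of why the old definition was unusable:

```python
class GrumbleError(object):   # the definition removed above
    pass

try:
    raise GrumbleError("broken")
except TypeError as e:
    # Python 3 refuses to raise a class that does not derive from BaseException
    print(e)   # "exceptions must derive from BaseException"
```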

View File

@@ -7,6 +7,7 @@ http://github.com/randynobx/phenny/
"""
from urllib.error import URLError, HTTPError
from tools import GrumbleError
import re
import web
@@ -19,7 +20,7 @@ def wuvt(phenny, input) :
playing = web.get('http://www.wuvt.vt.edu/playlists/latest_track.php')
djpage = web.get('http://www.wuvt.vt.edu/playlists/current_dj.php')
except (URLError, HTTPError):
return phenny.reply('Cannot connect to wuvt')
raise GrumbleError('Cannot connect to wuvt')
play= r_play.search(playing)
song = play.group(1)
artist = play.group(2)

View File

@@ -7,6 +7,11 @@ Licensed under the Eiffel Forum License 2.
http://inamidst.com/phenny/
"""
class GrumbleError(Exception):
pass
def deprecated(old):
def new(phenny, input, old=old):
self = phenny