use global GrumbleError for web exceptions

master
mutantmonkey 2012-06-01 22:17:09 -07:00
parent 28cde318a8
commit 9add0985ec
16 changed files with 85 additions and 81 deletions

View File

@@ -6,6 +6,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
 from urllib.error import HTTPError
 import web
+from tools import GrumbleError


 def commit(phenny, input):
     """.commit - Get a What the Commit commit message."""
@@ -13,8 +14,8 @@ def commit(phenny, input):
     try:
         msg = web.get("http://whatthecommit.com/index.txt")
     except (HTTPError, IOError, ValueError):
-        phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     phenny.reply(msg)
 commit.commands = ['commit']
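
For orientation, here is roughly how this first converted module (the .commit command) reads after the change. This is a reconstruction pieced together from the hunks above, not a verbatim copy of the file; the filename is not shown on this page.

    from urllib.error import HTTPError

    import web
    from tools import GrumbleError


    def commit(phenny, input):
        """.commit - Get a What the Commit commit message."""
        try:
            msg = web.get("http://whatthecommit.com/index.txt")
        except (HTTPError, IOError, ValueError):
            # The reply-and-return dance is gone; the error now propagates
            # out of the module instead of being reported inline.
            raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

        phenny.reply(msg)
    commit.commands = ['commit']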

View File

@@ -5,6 +5,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
 """
 from urllib.error import HTTPError
+from tools import GrumbleError
 import web
 import json
@@ -20,8 +21,7 @@ def fcc(phenny, input):
     try:
         req = web.get("http://callook.info/{0}/json".format(web.quote(callsign)))
         data = json.loads(req)
     except (HTTPError, IOError, ValueError):
-        phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     if len(data) <= 0 or data['status'] == 'INVALID':
         phenny.reply('No results found for {0}'.format(callsign))

View File

@@ -4,6 +4,7 @@ hs.py - hokie stalker module
 author: mutantmonkey <mutantmonkey@mutantmonkey.in>
 """
+from tools import GrumbleError
 import web
 import lxml.etree
@@ -18,8 +19,7 @@ def search(query):
     try:
         req = web.get(SEARCH_URL.format(query))
     except (HTTPError, IOError):
-        phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     xml = lxml.etree.fromstring(req.encode('utf-8'))
     results = xml.findall('{0}searchResponse/{0}searchResultEntry'.format(NS))

View File

@@ -26,7 +26,7 @@ def imdb(phenny, input):
     m = imdb_search(query)
     try:
         phenny.reply('{0} ({1}): {2} http://imdb.com/title/{3}'.format(m['Title'], m['Year'], m['Plot'], m['imdbID']))
     except:
         phenny.reply("No results found for '%s'." % query)
 imdb.commands = ['imdb']

View File

@@ -5,6 +5,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>, andreim <andreim@andreim.ne
 """
 from urllib.error import HTTPError
+from tools import GrumbleError
 import web
 import json
@@ -19,8 +20,7 @@ def linx(phenny, input):
     try:
         req = web.post("http://linx.li/vtluug", {'url': url})
     except (HTTPError, IOError):
-        phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     data = json.loads(req)
     if len(data) <= 0 or not data['success']:
@@ -56,8 +56,7 @@ def lines(phenny, input):
     try:
         req = web.post("http://linx.li/vtluuglines", {'nickname': nickname, 'date': date, 'sender': input.nick})
     except (HTTPError, IOError):
-        phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     phenny.reply(req)

View File

@@ -5,32 +5,31 @@ author: Ramblurr <unnamedrambler@gmail.com>
 author: mutantmonkey <mutantmonkey@mutantmonkey.in>
 """
-import random
 from urllib.error import HTTPError
+from tools import GrumbleError
 import web
 import lxml.html


 def fml(phenny, input):
     """.fml"""
     try:
         req = web.get("http://www.fmylife.com/random")
     except (HTTPError, IOError):
-        phenny.say("I tried to use .fml, but it was broken. FML")
-        return
+        raise GrumbleError("I tried to use .fml, but it was broken. FML")

     doc = lxml.html.fromstring(req)
     quote = doc.find_class('article')[0][0].text_content()
     phenny.say(quote)
 fml.commands = ['fml']


 def mlia(phenny, input):
     """.mlia - My life is average."""
     try:
         req = web.get("http://mylifeisaverage.com/")
     except (HTTPError, IOError):
-        phenny.say("I tried to use .mlia, but it wasn't loading. MLIA")
-        return
+        raise GrumbleError("I tried to use .mlia, but it wasn't loading. MLIA")

     doc = lxml.html.fromstring(req)
     quote = doc.find_class('story')[0][0].text_content()
@@ -38,70 +37,71 @@ def mlia(phenny, input):
     phenny.say(quote)
 mlia.commands = ['mlia']


 def mlib(phenny, input):
     """.mlib - My life is bro."""
     try:
         req = web.get("http://mylifeisbro.com/random")
     except (HTTPError, IOError):
-        phenny.say("MLIB is out getting a case of Natty. It's chill.")
-        return
+        raise GrumbleError("MLIB is out getting a case of Natty. It's chill.")

     doc = lxml.html.fromstring(req)
     quote = doc.find_class('storycontent')[0][0].text_content()
     phenny.say(quote)
 mlib.commands = ['mlib']


 def mlig(phenny, input):
     """.mlig - My life is ginger."""
     try:
         req = web.get("http://www.mylifeisginger.org/random")
     except (HTTPError, IOError):
-        phenny.say("Busy eating your soul. Be back soon.")
-        return
+        raise GrumbleError("Busy eating your soul. Be back soon.")

     doc = lxml.html.fromstring(req)
     quote = doc.find_class('oldlink')[0].text_content()
     phenny.say(quote)
 mlig.commands = ['mlig']


 def mlih(phenny, input):
     """.mlih - My life is ho."""
     try:
         req = web.get("http://mylifeisho.com/random")
     except (HTTPError, IOError):
-        phenny.say("MLIH is giving some dome to some lax bros.")
-        return
+        raise GrumbleError("MLIH is giving some dome to some lax bros.")

     doc = lxml.html.fromstring(req)
     quote = doc.find_class('storycontent')[0][0].text_content()
     phenny.say(quote)
 mlih.commands = ['mlih']


 def mlihp(phenny, input):
     """.mlihp - My life is Harry Potter."""
     try:
         req = web.get("http://www.mylifeishp.com/random")
     except (HTTPError, IOError):
-        phenny.say("This service is not available to Muggles.")
-        return
+        raise GrumbleError("This service is not available to Muggles.")

     doc = lxml.html.fromstring(req)
     quote = doc.find_class('oldlink')[0].text_content()
     phenny.say(quote)
 mlihp.commands = ['mlihp']


 def mlit(phenny, input):
     """.mlit - My life is Twilight."""
     try:
         req = web.get("http://mylifeistwilight.com/random")
     except (HTTPError, IOError):
-        phenny.say("Error: Your life is too Twilight. Go outside.")
-        return
+        raise GrumbleError("Error: Your life is too Twilight. Go outside.")

     doc = lxml.html.fromstring(req)
     quote = doc.find_class('fmllink')[0].text_content()
     phenny.say(quote)
 mlit.commands = ['mlit']


 if __name__ == '__main__':
     print(__doc__.strip())

View File

@@ -7,6 +7,7 @@ author: telnoratti <calvin@winkowski.me>
 from urllib.error import HTTPError
 from urllib import request
+from tools import GrumbleError
 import web
 import json
@@ -24,8 +25,7 @@ def xss(phenny, input):
     try:
         url = urlshortener(url)
     except (HTTPError, IOError):
-        phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     phenny.reply(url)
 xss.rule = (['xss'], r'(.*)')

View File

@@ -7,44 +7,45 @@ author: andreim <andreim@andreim.net>
 import web
 import re
 import json
+from tools import GrumbleError
 from random import choice


 def randomreddit(phenny, input):
     subreddit = input.group(2)
     if not subreddit:
         phenny.say(".random <subreddit> - get a random link from the subreddit's frontpage")
         return

     if not re.match('^[A-Za-z0-9_-]*$',subreddit):
         phenny.say(input.nick + ": bad subreddit format.")
         return

     url = "http://www.reddit.com/r/" + subreddit + "/.json"
     try:
         resp = web.get(url)
     except:
         try:
             resp = web.get(url)
         except:
             try:
                 resp = web.get(url)
             except:
-                phenny.reply('Reddit or subreddit unreachable.')
-                return
+                raise GrumbleError('Reddit or subreddit unreachable.')

     reddit = json.loads(resp)
     post = choice(reddit['data']['children'])

     nsfw = False
     if post['data']['over_18']:
         nsfw = True

     if nsfw:
         phenny.reply("!!NSFW!! " + post['data']['url'] + " (" + post['data']['title'] + ") !!NSFW!!")
     else:
         phenny.reply(post['data']['url'] + " (" + post['data']['title'] + ")")

 randomreddit.commands = ['random']
 randomreddit.priority = 'medium'

View File

@@ -6,6 +6,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
 from urllib.parse import quote as urlquote
 from urllib.error import HTTPError
+from tools import GrumbleError
 import web
 import lxml.html
@@ -20,8 +21,7 @@ def rule34(phenny, input):
     try:
         req = web.get("http://rule34.xxx/index.php?page=post&s=list&tags={0}".format(urlquote(q)))
     except (HTTPError, IOError):
-        phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     doc = lxml.html.fromstring(req)
     doc.make_links_absolute('http://rule34.xxx/')
@@ -33,8 +33,7 @@ def rule34(phenny, input):
     try:
         link = thumbs[0].find('a').attrib['href']
     except AttributeError:
-        phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     response = '!!NSFW!! -> {0} <- !!NSFW!!'.format(link)
     phenny.reply(response)

View File

@@ -5,6 +5,7 @@ author: andreim <andreim@andreim.net>
 """
 from urllib.error import HTTPError
+from tools import GrumbleError
 import web
 import json
@@ -19,8 +20,7 @@ def short(phenny, input):
     try:
         req = web.post("http://vtlu.ug/vtluug", {'lurl': url})
     except (HTTPError, IOError):
-        phenny.reply("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     phenny.reply(req)
 short.rule = (['short'], r'(.*)')

View File

@@ -6,6 +6,7 @@ Copyright (c) 2011 Dafydd Crosby - http://www.dafyddcrosby.com
 Licensed under the Eiffel Forum License 2.
 """
+from tools import GrumbleError
 import re
 import web
@@ -29,8 +30,7 @@ def slogan(phenny, input):
     slogan = remove_tags.sub('', slogan)

     if not slogan:
-        phenny.say("Looks like an issue with sloganizer.net")
-        return
+        raise GrumbleError("Looks like an issue with sloganizer.net")

     phenny.say(slogan)
 slogan.commands = ['slogan']
 slogan.example = '.slogan Granola'

View File

@@ -7,6 +7,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
 from urllib.parse import quote as urlquote
 from urllib.error import HTTPError
+from tools import GrumbleError
 import web
 import lxml.html
 import lxml.cssselect
@@ -31,8 +32,7 @@ def tfw(phenny, input, fahrenheit=False, celsius=False):
     try:
         req = web.get("http://thefuckingweather.com/?where={0}{1}".format(urlquote(where), celsius_param))
     except (HTTPError, IOError):
-        phenny.say("THE INTERNET IS FUCKING BROKEN. Please try again later.")
-        return
+        raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")

     doc = lxml.html.fromstring(req)

View File

@@ -6,6 +6,7 @@ author: mutantmonkey <mutantmonkey@mutantmonkey.in>
 import urllib.request
 from urllib.error import HTTPError
+from tools import GrumbleError
 import web
 import json
@@ -30,8 +31,8 @@ def urbandict(phenny, input):
         data = req.read().decode('utf-8')
         data = json.loads(data)
     except (HTTPError, IOError, ValueError):
-        phenny.say("Urban Dictionary slemped out on me. Try again in a minute.")
-        return
+        raise GrumbleError(
+            "Urban Dictionary slemped out on me. Try again in a minute.")

     if data['result_type'] == 'no_results':
         phenny.say("No results found for {0}".format(word))

View File

@@ -9,7 +9,7 @@ http://inamidst.com/phenny/
 import re, urllib.request, urllib.parse, urllib.error
 import web
-from tools import deprecated
+from tools import deprecated, GrumbleError

 r_from = re.compile(r'(?i)([+-]\d+):00 from')
@@ -29,9 +29,6 @@ def location(name):
     lng = results['geonames'][0]['lng']
     return name, countryName, lat, lng

-class GrumbleError(object):
-    pass
-
 def local(icao, hour, minute):
     uri = ('http://www.flightstats.com/' +
         'go/Airport/airportDetails.do?airportCode=%s')

View File

@@ -7,6 +7,7 @@ http://github.com/randynobx/phenny/
 """
 from urllib.error import URLError, HTTPError
+from tools import GrumbleError
 import re
 import web
@@ -19,7 +20,7 @@ def wuvt(phenny, input) :
         playing = web.get('http://www.wuvt.vt.edu/playlists/latest_track.php')
         djpage = web.get('http://www.wuvt.vt.edu/playlists/current_dj.php')
     except (URLError, HTTPError):
-        return phenny.reply('Cannot connect to wuvt')
+        raise GrumbleError('Cannot connect to wuvt')

     play= r_play.search(playing)
     song = play.group(1)
     artist = play.group(2)

View File

@@ -7,6 +7,11 @@ Licensed under the Eiffel Forum License 2.
 http://inamidst.com/phenny/
 """

+class GrumbleError(Exception):
+    pass
+

 def deprecated(old):
     def new(phenny, input, old=old):
         self = phenny
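
The caller side of this pattern is not part of the diff on this page. Presumably phenny's command dispatcher catches GrumbleError and relays its message to the channel, which is what lets every module above drop its reply-and-return block. A minimal sketch of that assumed handling, with a hypothetical dispatch function and signature:

    from tools import GrumbleError

    def dispatch(phenny, func, input):
        """Hypothetical caller-side handling assumed by this commit."""
        try:
            func(phenny, input)
        except GrumbleError as e:
            # Whatever the module grumbled about goes straight to the channel.
            phenny.say(str(e))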