2011-10-19 18:56:04 -04:00
|
|
|
#!/usr/bin/python3
|
2011-02-18 18:12:09 -05:00
|
|
|
"""
|
|
|
|
hs.py - hokie stalker module
|
2011-08-31 14:05:47 -04:00
|
|
|
author: mutantmonkey <mutantmonkey@mutantmonkey.in>
|
2011-02-18 18:12:09 -05:00
|
|
|
"""
|
|
|
|
|
2012-06-02 01:17:09 -04:00
|
|
|
from tools import GrumbleError
|
2011-10-19 18:56:04 -04:00
|
|
|
import web
|
|
|
|
import lxml.etree
|
2011-02-18 18:12:09 -05:00
|
|
|
|
# DSML (Directory Services Markup Language) endpoint for VT People Search;
# {0} is the URL-quoted query string.
SEARCH_URL = "https://webapps.middleware.vt.edu/peoplesearch/PeopleSearch?query={0}&dsml-version=2"
# Human-browsable search-results page, linked when a query is ambiguous.
RESULTS_URL = "http://search.vt.edu/search/people.html?q={0}"
# Detail page for a single person; {0:d} is the numeric uid attribute.
PERSON_URL = "http://search.vt.edu/search/person.html?person={0:d}"
# XML namespace prefix used by every element in the DSML response.
NS = '{http://www.dsml.org/DSML}'
|
2011-02-18 18:12:09 -05:00
|
|
|
|
2011-10-19 18:56:04 -04:00
|
|
|
"""Search the people search database using the argument as a query."""
|
2011-02-18 18:12:09 -05:00
|
|
|
def search(query):
|
2011-10-19 18:56:04 -04:00
|
|
|
query = web.quote(query)
|
|
|
|
try:
|
2013-07-21 03:02:41 -04:00
|
|
|
r = web.get(SEARCH_URL.format(query), verify=False)
|
2013-06-09 01:27:24 -04:00
|
|
|
except (web.ConnectionError, web.HTTPError):
|
2012-06-02 01:17:09 -04:00
|
|
|
raise GrumbleError("THE INTERNET IS FUCKING BROKEN. Please try again later.")
|
2011-10-19 18:56:04 -04:00
|
|
|
|
2013-09-07 18:58:47 -04:00
|
|
|
# apparently the failure mode if you search for <3 characters is a blank
|
|
|
|
# XML page...
|
|
|
|
if len(r) <= 0:
|
|
|
|
return False
|
|
|
|
|
2013-07-21 03:02:41 -04:00
|
|
|
xml = lxml.etree.fromstring(r.encode('utf-8'))
|
|
|
|
results = xml.findall('{0}directory-entries/{0}entry'.format(NS))
|
2011-10-19 18:56:04 -04:00
|
|
|
if len(results) <= 0:
|
2011-08-31 14:05:47 -04:00
|
|
|
return False
|
2011-02-18 18:12:09 -05:00
|
|
|
|
2011-10-19 18:56:04 -04:00
|
|
|
ret = []
|
|
|
|
for entry in results:
|
|
|
|
entry_data = {}
|
2013-07-21 03:02:41 -04:00
|
|
|
for attr in entry.findall('{0}attr'.format(NS)):
|
2011-10-19 18:56:04 -04:00
|
|
|
entry_data[attr.attrib['name']] = attr[0].text
|
|
|
|
ret.append(entry_data)
|
|
|
|
|
|
|
|
return ret
|
2011-02-18 18:12:09 -05:00
|
|
|
|
|
|
|
def hs(phenny, input):
    """.hs <pid/name/email> - Search for someone on Virginia Tech People Search."""
    raw = input.group(2)
    if raw is None:
        return
    query = raw.strip()

    # Link to the full web results page, offered when the query is ambiguous.
    results_link = RESULTS_URL.format(web.quote(query))

    matches = search(query)
    if not matches:
        phenny.reply("No results found")
        return

    if len(matches) > 1:
        phenny.reply("Multiple results found; try {0}".format(results_link))
        return

    # Exactly one match: reply with name and a link to the person page.
    for entry in matches:
        person_link = PERSON_URL.format(int(entry['uid']))
        phenny.reply("{0} - {1}".format(entry['cn'], person_link))
hs.rule = (['hs'], r'(.*)')
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Running the module directly just prints its usage docstring.
    print(__doc__.strip())
|