#!/usr/bin/env python

###
# Copyright (c) 2002, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#   * Redistributions of source code must retain the above copyright notice,
#     this list of conditions, and the following disclaimer.
#   * Redistributions in binary form must reproduce the above copyright notice,
#     this list of conditions, and the following disclaimer in the
#     documentation and/or other materials provided with the distribution.
#   * Neither the name of the author of this software nor the name of
#     contributors to this software may be used to endorse or promote products
#     derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###

"""
Provides several commands that go out to websites and get things.
"""
from baseplugin import *

import re
import sets
import time
import random
import urllib
import urllib2

import babelfish

import utils
import debug
import ircutils
import privmsgs
import callbacks
import structures


class FreshmeatException(Exception):
    pass


class Http(callbacks.Privmsg):
    threaded = True

    def __init__(self):
        callbacks.Privmsg.__init__(self)
        self.deepthoughtq = structures.queue()
        self.deepthoughts = sets.Set()
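        # deepthoughtq holds (timestamp, thought) pairs so cached thoughts can
        # expire after a day; deepthoughts mirrors the same thoughts in a set
        # for a quick duplicate check.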

    def deepthought(self, irc, msg, args):
        """takes no arguments

        Returns a Deep Thought by Jack Handey.
        """
        url = 'http://www.tremorseven.com/aim/deepaim.php?job=view'
        thought = ' ' * 512
        now = time.time()
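        # Expire thoughts fetched more than a day (86400 seconds) ago so they
        # become eligible to be shown again.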
        while self.deepthoughtq and now - self.deepthoughtq[0][0] > 86400:
            s = self.deepthoughtq.dequeue()[1]
            self.deepthoughts.remove(s)
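        # Keep fetching until we get a thought short enough for IRC (430
        # characters or fewer) that we haven't already seen.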
        while len(thought) > 430 or thought in self.deepthoughts:
            fd = urllib2.urlopen(url)
            s = fd.read()
            thought = s.split('<br>')[2]
            thought = ' '.join(thought.split())
        self.deepthoughtq.enqueue((now, thought))
        self.deepthoughts.add(thought)
        irc.reply(msg, thought)

    _titleRe = re.compile(r'<title>(.*)</title>', re.I)
    def title(self, irc, msg, args):
        """<url>

        Returns the HTML <title>...</title> of a URL.
        """
        url = privmsgs.getArgs(args)
        if '://' not in url:
            url = 'http://%s' % url
        try:
            fd = urllib2.urlopen(url)
            text = fd.read()
            m = self._titleRe.search(text)
            if m is not None:
                irc.reply(msg, m.group(1))
            else:
                irc.reply(msg, 'That URL appears to have no HTML title.')
        except ValueError, e:
            irc.error(msg, str(e))
        except Exception, e:
            irc.error(msg, debug.exnToString(e))

    _fmProject = re.compile('<projectname_full>([^<]+)</projectname_full>')
    _fmVersion = re.compile('<latest_version>([^<]+)</latest_version>')
    _fmVitality = re.compile('<vitality_percent>([^<]+)</vitality_percent>')
    _fmPopular = re.compile('<popularity_percent>([^<]+)</popularity_percent>')
    _fmLastUpdated = re.compile('<date_updated>([^<]+)</date_updated>')
    def freshmeat(self, irc, msg, args):
        """<project name>

        Returns Freshmeat data about a given project.
        """
        project = privmsgs.getArgs(args)
        url = 'http://www.freshmeat.net/projects-xml/%s' % project
        try:
            fd = urllib2.urlopen(url)
            text = fd.read()
            fd.close()
            if text.startswith('Error'):
                raise FreshmeatException, text
            project = self._fmProject.search(text).group(1)
            version = self._fmVersion.search(text).group(1)
            vitality = self._fmVitality.search(text).group(1)
            popularity = self._fmPopular.search(text).group(1)
            lastupdated = self._fmLastUpdated.search(text).group(1)
            irc.reply(msg,
                      '%s, last updated %s, with a vitality percent of %s '
                      'and a popularity of %s, is in version %s.' %
                      (project, lastupdated, vitality, popularity, version))
        except FreshmeatException, e:
            irc.error(msg, debug.exnToString(e))
        except Exception, e:
            debug.recoverableException()
            irc.error(msg, debug.exnToString(e))

    def stockquote(self, irc, msg, args):
        """<company symbol>

        Gets the information about the current price and change from the
        previous day of a given company (represented by a stock symbol).
        """
        symbol = privmsgs.getArgs(args)
        url = 'http://finance.yahoo.com/d/quotes.csv?s=%s' \
              '&f=sl1d1t1c1ohgv&e=.csv' % symbol
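        # The f= parameter appears to request, in order: symbol, last price,
        # date, time, change, open, high, low, and volume from Yahoo's CSV
        # quote service.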
        try:
            fd = urllib2.urlopen(url)
            quote = fd.read()
            fd.close()
        except Exception, e:
            irc.error(msg, debug.exnToString(e))
            return
        data = quote.split(',')
        #debug.printf(data) # debugging
        if data[1] != '0.00':
            irc.reply(msg,
                      'The current price of %s is %s, as of %s EST. '
                      'A change of %s from the last business day.' %
                      (data[0][1:-1], data[1], data[3][1:-1], data[4]))
            return
        else:
            m = 'I couldn\'t find a listing for %s' % symbol
            irc.error(msg, m)
            return

    def foldoc(self, irc, msg, args):
        """<something to lookup on foldoc>

        FOLDOC is a searchable dictionary of acronyms, jargon, programming
        languages, tools, architecture, operating systems, networking, theory,
        conventions, standards, mathematics, telecoms, electronics, history;
        in fact, anything having to do with computing.  This command searches
        that dictionary.
        """
        if not args:
            raise callbacks.ArgumentError
        search = '+'.join([urllib.quote(arg) for arg in args])
        url = 'http://foldoc.doc.ic.ac.uk/foldoc/foldoc.cgi?query=%s' % search
        try:
            fd = urllib2.urlopen(url)
            html = fd.read()
            fd.close()
        except Exception, e:
            irc.error(msg, debug.exnToString(e))
            return
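        # The definition is the second '<P>'-delimited chunk of the page;
        # collapse it onto one line and strip the remaining HTML markup.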
        text = html.split('<P>\n', 2)[1]
        text = text.replace('.\n', '. ')
        text = text.replace('\n', ' ')
        text = utils.htmlToText(text)
        text = text.strip()
        if text:
            irc.reply(msg, text)
        else:
            search = urllib.unquote(search)
            s = 'There appears to be no definition for %s.' % search
            irc.reply(msg, s)
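
    # HamWeather page regexes, prefixed _hw to keep them separate from the
    # weather.com regexes used by weather() below.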
    _hwCityregex = re.compile(
        r'<td><font size="4" face="arial"><b>'
        r'(.*?), (.*?),(.*?)</b></font></td>', re.IGNORECASE)
    _hwInterregex = re.compile(
        r'<td><font size="4" face="arial"><b>'
        r'(.*?), (.*?)</b></font></td>', re.IGNORECASE)
    _hwCondregex = re.compile(
        r'<td width="100%" colspan="2" align="center"><strong>'
        r'<font face="arial">(.*?)</font></strong></td>', re.IGNORECASE)
    _hwTempregex = re.compile(
        r'<td valign="top" align="right"><strong><font face="arial">'
        r'(.*?)</font></strong></td>', re.IGNORECASE)
    def weather2(self, irc, msg, args):
        """<US zip code | US/Canada city, state | Foreign city, country>

        Returns the approximate weather conditions for a given city.
        """
        zip = privmsgs.getArgs(args)
        zip = zip.replace(',', '')
        zip = zip.lower().split()

        # If we received more than one argument, then we have received
        # a city and state argument that we need to process.
        if len(zip) > 1:
            # If we received more than two arguments, then we got a city with
            # a multi-word name, e.g. ['Garden', 'City', 'KS'] instead of
            # ['Liberal', 'KS'].  We join it together with a + to pass to our
            # query.
            if len(zip) > 2:
                city = '+'.join(zip[:-1]).lower()
                isState = zip[-1].lower()
            else:
                city = zip[0].lower()
                isState = zip[1].lower()

            # We must break the states up into two sections.  The US and
            # Canada are the only countries that require a state argument.

            # United States
            realStates = sets.Set(['ak', 'al', 'ar', 'ca', 'co', 'ct', 'dc',
                                   'de', 'fl', 'ga', 'hi', 'ia', 'id', 'il',
                                   'in', 'ks', 'ky', 'la', 'ma', 'md', 'me',
                                   'mi', 'mn', 'mo', 'ms', 'mt', 'nc', 'nd',
                                   'ne', 'nh', 'nj', 'nm', 'nv', 'ny', 'oh',
                                   'ok', 'or', 'pa', 'ri', 'sc', 'sd', 'tn',
                                   'tx', 'ut', 'va', 'vt', 'wa', 'wi', 'wv',
                                   'wy'])
            # Canadian provinces.  (Province being a metric State measurement,
            # mind you. :D)
            fakeStates = sets.Set(['ab', 'bc', 'mb', 'nb', 'nf', 'ns', 'nt',
                                   'nu', 'on', 'pe', 'qc', 'sk', 'yk'])

            if isState in realStates:
                state = isState
                country = 'us'
            elif isState in fakeStates:
                state = isState
                country = 'ca'
            else:
                state = ''
                country = isState

            #debug.printf('State: %s' % (state,))
            #debug.printf('Country: %s' % (country,))

            url = 'http://www.hamweather.net/cgi-bin/hw3/hw3.cgi?' \
                  'pass=&dpp=&forecast=zandh&config=&' \
                  'place=%s&state=%s&country=%s' % \
                  (city, state, country)

        # We received a single argument: a zip code or station id.
        else:
            url = 'http://www.hamweather.net/cgi-bin/hw3/hw3.cgi?' \
                  'config=&forecast=zandh&pands=%s&Submit=GO' % (zip[0],)

        #debug.printf(url)
        try:
            fd = urllib2.urlopen(url)
            html = fd.read()
            fd.close()

            headData = self._hwCityregex.search(html)
            if headData:
                (city, state, country) = headData.groups()
            else:
                headData = self._hwInterregex.search(html)
                (city, state) = headData.groups()

            temp = self._hwTempregex.search(html).group(1)
            conds = self._hwCondregex.search(html).group(1)

            if temp and conds and city and state:
                irc.reply(msg, 'The current temperature in %s, %s is %s'
                          ' with %s conditions.' %
                          (city.strip(), state.strip(), temp, conds))
            else:
                irc.error(msg, 'The format of the page was odd.')

        except urllib2.URLError:
            irc.error(msg, 'Couldn\'t open the search page.')
        except:
            irc.error(msg, 'The format of the page was odd.')

    _tempregex = re.compile('CLASS=obsTempTextA>(\d+)&deg;F</b></td>',
                            re.IGNORECASE)
    _cityregex = re.compile(r'Local Forecast for (.*), (.*?) ')
    _condregex = re.compile('CLASS=obsInfo2><b CLASS=obsTextA>(.*)</b></td>',
                            re.IGNORECASE)
    def weather(self, irc, msg, args):
        """<US zip code>

        Returns the approximate weather conditions at a given US zip code.
        """
        zip = privmsgs.getArgs(args)
        url = "http://www.weather.com/weather/local/%s?lswe=%s" % (zip, zip)
        try:
            html = urllib2.urlopen(url).read()
            city, state = self._cityregex.search(html).groups()
            temp = self._tempregex.search(html).group(1)
            conds = self._condregex.search(html).group(1)
            irc.reply(msg, 'The current temperature in %s, %s is %dF with %s'
                      ' conditions.' % (city, state, int(temp), conds))
        except AttributeError:
            irc.error(msg, 'The format of the page was odd.')
        except urllib2.URLError:
            irc.error(msg, 'Couldn\'t open the search page.')

    _geekquotere = re.compile('<p class="qt">(.*?)</p>')
    def geekquote(self, irc, msg, args):
        """[<multiline>]

        Returns a random geek quote from bash.org; the optional argument
        <multiline> specifies whether multi-line quotes (which are longer
        than other quotes, generally) are to be allowed.
        """
        multiline = privmsgs.getArgs(args, needed=0, optional=1)
        try:
            fd = urllib2.urlopen('http://bash.org/?random1')
        except urllib2.URLError:
            irc.error(msg, 'Error connecting to geekquote server.')
            return
        html = fd.read()
        fd.close()
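        # The page appears to wrap each quote in <p class="qt">...</p>; for
        # multi-line quotes the match has to be allowed to span newlines.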
        if multiline:
            m = re.search(self._geekquotere.pattern, html, re.S)
        else:
            m = self._geekquotere.search(html)
        if m is None:
            irc.error(msg, 'No quote found.')
            return
        quote = utils.htmlToText(m.group(1))
        quote = ' // '.join(quote.splitlines())
        irc.reply(msg, quote)

    _acronymre = re.compile(r'<td[^w]+width="70[^>]+>(?:<b>)?([^<]+)(?:</b>)?')
    def acronym(self, irc, msg, args):
        """<acronym>

        Displays acronym matches from acronymfinder.com.
        """
        acronym = privmsgs.getArgs(args)
        try:
            url = 'http://www.acronymfinder.com/' \
                  'af-query.asp?String=exact&Acronym=%s' % acronym
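            # Spoof a browser User-Agent; acronymfinder.com appears to refuse
            # the default urllib2 agent.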
            request = urllib2.Request(url, headers={'User-agent':
              'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT 4.0)'})
            fd = urllib2.urlopen(request)
        except urllib2.URLError:
            irc.error(msg, 'Couldn\'t connect to acronymfinder.com')
            return
        html = fd.read()
        fd.close()
        # Strip whitespace from each matched definition and drop any that
        # come back empty.
        defs = filter(None, map(str.strip, self._acronymre.findall(html)))
        debug.printf(defs)
        if len(defs) == 0:
            irc.reply(msg, 'No definitions found.')
        else:
            s = ircutils.privmsgPayload(defs, ', or ')
            irc.reply(msg, '%s could be %s' % (acronym, s))

    _netcraftre = re.compile(r'whatos text -->(.*?)<a href="/up/acc', re.S)
    def netcraft(self, irc, msg, args):
        """<hostname|ip>

        Returns Netcraft.com's determination of what operating system and
        webserver is running on the host given.
        """
        hostname = privmsgs.getArgs(args)
        url = 'http://uptime.netcraft.com/up/graph/?host=%s' % hostname
        fd = urllib2.urlopen(url)
        html = fd.read()
        fd.close()
        m = self._netcraftre.search(html)
        if m:
            html = m.group(1)
            s = utils.htmlToText(html, tagReplace='').strip('\xa0 ')
            irc.reply(msg, s[9:]) # Snip off the leading "the site "
        elif 'We could not get any results' in html:
            irc.reply(msg, 'No results found for %s.' % hostname)
        else:
            irc.error(msg, 'The format of the page was odd.')
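
    # Map common language abbreviations to the names babelfish understands;
    # utils.abbrev presumably also accepts any unambiguous prefix of each
    # language name.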
    _abbrevs = utils.abbrev(map(str.lower, babelfish.available_languages))
    _abbrevs['de'] = 'german'
    _abbrevs['jp'] = 'japanese'
    _abbrevs['kr'] = 'korean'
    _abbrevs['es'] = 'spanish'
    _abbrevs['pt'] = 'portuguese'
    _abbrevs['it'] = 'italian'
    _abbrevs['zh'] = 'chinese'
    for language in babelfish.available_languages:
        _abbrevs[language] = language
    def translate(self, irc, msg, args):
        """<from-language> <to-language> <text>

        Returns the text translated to the new language.
        """
        (fromLang, toLang, text) = privmsgs.getArgs(args, needed=3)
        try:
            fromLang = self._abbrevs[fromLang.lower()]
            toLang = self._abbrevs[toLang.lower()]
            translation = babelfish.translate(text, fromLang, toLang)
            irc.reply(msg, translation)
        except (KeyError, babelfish.LanguageNotAvailableError), e:
            irc.error(msg, '%s is not a valid language.  Valid languages '
                      'include %s' %
                      (e, utils.commaAndify(babelfish.available_languages)))
        except babelfish.BabelizerIOError, e:
            irc.error(msg, str(e))
        except babelfish.BabelfishChangedError, e:
            irc.error(msg, 'Babelfish has foiled our plans by changing its '
                      'webpage format.')

    def babelize(self, irc, msg, args):
        """<from-language> <to-language> <text>

        Translates <text> repeatedly between <from-language> and <to-language>
        until it stops changing or has been translated 12 times, whichever
        comes first.  One of the languages must be English.
        """
        (fromLang, toLang, text) = privmsgs.getArgs(args, needed=3)
        try:
            fromLang = self._abbrevs[fromLang.lower()]
            toLang = self._abbrevs[toLang.lower()]
            if fromLang != 'english' and toLang != 'english':
                irc.error(msg, 'One language must be English.')
                return
            translations = babelfish.babelize(text, fromLang, toLang)
            irc.reply(msg, translations[-1])
        except (KeyError, babelfish.LanguageNotAvailableError), e:
            irc.error(msg, '%s is not a valid language.  Valid languages '
                      'include %s' %
                      (e, utils.commaAndify(babelfish.available_languages)))
        except babelfish.BabelizerIOError, e:
            irc.error(msg, str(e))
        except babelfish.BabelfishChangedError, e:
            irc.error(msg, 'Babelfish has foiled our plans by changing its '
                      'webpage format.')

    def randomlanguage(self, irc, msg, args):
        """[<allow-english>]

        Returns a random language supported by babelfish.  If <allow-english>
        is provided, will include English in the list of possible languages.
        """
        allowEnglish = privmsgs.getArgs(args, needed=0, optional=1)
        language = random.sample(babelfish.available_languages, 1)[0]
        while not allowEnglish and language == 'English':
            language = random.sample(babelfish.available_languages, 1)[0]
        irc.reply(msg, language)


Class = Http

# vim:set shiftwidth=4 tabstop=8 expandtab textwidth=78: