Updated to use webutils throughout.

This commit is contained in:
Jeremy Fincher 2004-08-05 18:29:26 +00:00
parent 0bbd05db58
commit d134a2d9ac
6 changed files with 18 additions and 30 deletions

View File

@@ -39,7 +39,6 @@ import supybot.plugins as plugins
 import re
 import sets
-import urllib2
 import supybot.registry as registry
@@ -47,6 +46,7 @@ import supybot.conf as conf
 import supybot.utils as utils
 import supybot.plugins as plugins
 import supybot.ircutils as ircutils
+import supybot.webutils as webutils
 import supybot.privmsgs as privmsgs
 import supybot.callbacks as callbacks
@@ -89,9 +89,7 @@ class Gameknot(callbacks.PrivmsgCommandAndRegexp):
     def getStats(self, name):
         gkprofile = 'http://www.gameknot.com/stats.pl?%s' % name
         try:
-            fd = urllib2.urlopen(gkprofile)
-            profile = fd.read()
-            fd.close()
+            profile = webutils.getUrl(gkprofile)
             rating = self._gkrating.search(profile).group(1)
             games = self._gkgames.search(profile).group(1)
             (w, l, d) = self._gkrecord.search(profile).groups()
@@ -150,8 +148,8 @@ class Gameknot(callbacks.PrivmsgCommandAndRegexp):
             else:
                 raise callbacks.Error, 'The format of the page was odd. %s' % \
                       conf.supybot.replies.possibleBug()
-        except urllib2.URLError:
-            raise callbacks.Error, 'Couldn\'t connect to gameknot.com'
+        except webutils.WebError, e:
+            raise callbacks.Error, webutils.strError(e)

     def gkstats(self, irc, msg, args):
@@ -174,9 +172,7 @@ class Gameknot(callbacks.PrivmsgCommandAndRegexp):
         if not self.registryValue('gameSnarfer', msg.args[0]):
             return
         url = match.group(0)
-        fd = urllib2.urlopen(url)
-        s = fd.read()
-        fd.close()
+        s = webutils.getUrl(url)
         try:
             if 'no longer available' in s:
                 s = 'That game is no longer available.'

View File

@@ -42,7 +42,6 @@ import sets
 import time
 import getopt
 import socket
-import urllib2
 import xml.sax
 import SOAP
@@ -55,6 +54,7 @@ import supybot.utils as utils
 import supybot.ircmsgs as ircmsgs
 import supybot.plugins as plugins
 import supybot.ircutils as ircutils
+import supybot.webutils as webutils
 import supybot.privmsgs as privmsgs
 import supybot.callbacks as callbacks
 import supybot.structures as structures
import supybot.structures as structures import supybot.structures as structures
@@ -381,10 +381,8 @@ class Google(callbacks.PrivmsgCommandAndRegexp):
         m = match.group(0)
         header = {'User-agent': 'Mozilla/4.0 (compatible; MSIE 5.5; '
                                 'Windows NT 4.0)'}
-        request = urllib2.Request(m, headers=header)
-        fd = urllib2.urlopen(request)
-        text = fd.read()
-        fd.close()
+        request = webutils.Request(m, headers=header)
+        text = webutils.getUrl(request)
         mThread = None
         mGroup = None
         if 'threadm=' in m:
@@ -392,19 +390,15 @@ class Google(callbacks.PrivmsgCommandAndRegexp):
             if path is None:
                 return
             url = 'http://groups.google.com%s' % path.group(1)
-            request = urllib2.Request(url, headers=header)
-            fd = urllib2.urlopen(request)
-            text = fd.read()
-            fd.close()
+            request = webutils.Request(url, headers=header)
+            text = webutils.getUrl(request)
         elif 'selm=' in m:
             path = self._ggSelm.search(m)
             if m is None:
                 return
             url = 'http://groups.google.com/groups?%s' % path.group(0)
-            request = urllib2.Request(url, headers=header)
-            fd = urllib2.urlopen(request)
-            text = fd.read()
-            fd.close()
+            request = webutils.Request(url, headers=header)
+            text = webutils.getUrl(request)
         else:
             pass
         mThread = self._ggThread.search(text)

View File

@@ -42,7 +42,6 @@ import sets
 import getopt
 import socket
 import urllib
-import urllib2
 import xml.dom.minidom
 from itertools import imap, ifilter
@@ -235,7 +234,7 @@ class Http(callbacks.Privmsg):
         acronym = privmsgs.getArgs(args)
         url = 'http://www.acronymfinder.com/' \
               'af-query.asp?String=exact&Acronym=%s' % urllib.quote(acronym)
-        request = urllib2.Request(url, headers={'User-agent':
+        request = webutils.Request(url, headers={'User-agent':
           'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT 4.0)'})
         html = webutils.getUrl(request)
         if 'daily limit' in html:

View File

@@ -38,10 +38,9 @@ __revision__ = "$Id$"
 import supybot.plugins as plugins
-import urllib2
 import supybot.utils as utils
 import supybot.privmsgs as privmsgs
+import supybot.webutils as webutils
 import supybot.callbacks as callbacks
@@ -237,8 +236,7 @@ class OSU(callbacks.Privmsg):
         s = '.'.join(args)
         url = 'http://www.ohio-state.edu/cgi-bin/inquiry2.cgi?keyword=%s' % s
         try:
-            fd = urllib2.urlopen(url)
-            data = fd.read()
+            data = webutils.getUrl(url)
             emails = []
             for line in data.splitlines():
                 line.strip()

View File

@@ -43,7 +43,6 @@ import sets
 import time
 import random
 import os.path
-import urllib2
 import UserDict
 import threading

View File

@@ -34,11 +34,13 @@ __revision__ = "$Id$"
 import supybot.fix as fix

 import re
-import supybot.conf as conf
 import socket
 import urllib2
 import urlparse

+import supybot.conf as conf
+
+Request = urllib2.Request

 class WebError(Exception):
     pass