Limnoria/plugins/Google.py

#!/usr/bin/env python
###
# Copyright (c) 2002, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
Accesses Google for various things.
"""

__revision__ = "$Id$"
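# Typical setup, going by configure() and the help text below (assumes the
# standard Owner/Config commands are available):
#   load Google
#   config supybot.plugins.Google.licenseKey <your 32-character key>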
import plugins

import re
import sets
import time
import getopt
import socket
import urllib2
import xml.sax

import SOAP
import google

import registry
import conf
import utils
import ircmsgs
import plugins
import ircutils
import privmsgs
import callbacks
import structures

def configure(advanced):
    from questions import output, expect, anything, something, yn
    output('To use Google\'s Web Services, you must have a license key.')
    if yn('Do you have a license key?'):
        key = something('What is it?')
        while len(key) != 32:
            output('That\'s not a valid Google license key.')
            if yn('Are you sure you have a valid Google license key?'):
                key = something('What is it?')
            else:
                key = ''
                break
        if key:
            conf.registerPlugin('Google', True)
            conf.supybot.plugins.Google.licenseKey.setValue(key)
            output("""The Google plugin has the functionality to watch for URLs
                      that match a specific pattern. (We call this a snarfer)
                      When supybot sees such a URL, it will parse the web page
                      for information and reply with the results.
                      Google has two available snarfers: Google Groups link
                      snarfing and a google search snarfer.""")
            if yn('Do you want the Google Groups link snarfer enabled by '
                  'default?'):
                conf.supybot.plugins.Google.groupsSnarfer.setValue(True)
            if yn('Do you want the Google search snarfer enabled by default?'):
                conf.supybot.plugins.Google.searchSnarfer.setValue(True)
    else:
        output("""You'll need to get a key before you can use this plugin.
                  You can apply for a key at http://www.google.com/apis/""")

totalSearches = 0
totalTime = 0
last24hours = structures.queue()
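# search() is the shared entry point for the commands below: multi-word
# arguments are quoted (via repr) before being joined into one query, and
# the module-level counters above feed the 'info' command's statistics.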
def search(log, queries, **kwargs):
    assert not isinstance(queries, basestring), 'Old code: queries is a list.'
    try:
        global totalSearches, totalTime, last24hours
        for (i, query) in enumerate(queries):
            if len(query.split(None, 1)) > 1:
                queries[i] = repr(query)
        data = google.doGoogleSearch(' '.join(queries), **kwargs)
        now = time.time()
        totalSearches += 1
        totalTime += data.meta.searchTime
        last24hours.enqueue(now)
        while last24hours and now - last24hours.peek() > 86400:
            last24hours.dequeue()
        return data
    except socket.error, e:
        if e.args[0] == 110:
            raise callbacks.Error, 'Connection timed out to Google.com.'
        else:
            raise callbacks.Error, 'Error connecting to Google.com.'
    except SOAP.HTTPError, e:
        log.warning('HTTP Error accessing Google: %s', e)
        raise callbacks.Error, 'Error connecting to Google.com.'
    except SOAP.faultType, e:
        log.exception('Uncaught SOAP error:')
        raise callbacks.Error, 'Invalid Google license key.'
    except xml.sax.SAXException, e:
        log.exception('Uncaught SAX error:')
        raise callbacks.Error, 'Google returned an unparseable response. ' \
                               'The full traceback has been logged.'
    except SOAP.Error, e:
        log.exception('Uncaught SOAP exception in Google.search:')
        raise callbacks.Error, 'Error connecting to Google.com.'

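# LicenseKey validates that a key is 32 characters long and hands it to the
# google module (google.setLicense) whenever the registry value changes.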
class LicenseKey(registry.String):
    def setValue(self, s):
        if s and len(s) != 32:
            raise registry.InvalidRegistryValue, 'Invalid Google license key.'
        if s:
            registry.String.setValue(self, s)
            google.setLicense(self.value)
        if not s:
            registry.String.setValue(self, '')
            google.setLicense(self.value)

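# Language values are Google's 'lang_' restrict codes (see validStrings);
# normalize() adds the 'lang_' prefix when it is missing and canonicalizes
# the case.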
class Language(registry.OnlySomeStrings):
    validStrings = ['lang_' + s for s in 'ar zh-CN zh-TW cs da nl en et fi fr '
                                         'de el iw hu is it ja ko lv lt no pt '
                                         'pl ro ru es sv tr'.split()]
    validStrings.append('')
    def normalize(self, s):
        if not s.startswith('lang_'):
            s = 'lang_' + s
        # Lowercase everything except a trailing CN/TW suffix, which the
        # valid values above keep uppercase (lang_zh-CN, lang_zh-TW).
        if not (s.endswith('CN') or s.endswith('TW')):
            s = s.lower()
        else:
            s = s.lower()[:-2] + s[-2:]
        return s

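# Registry definitions for the plugin's configuration.  The channel values
# below can be set per channel; for example, with the standard Config plugin
# loaded: config channel supybot.plugins.Google.groupsSnarfer True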
conf.registerPlugin('Google')
conf.registerChannelValue(conf.supybot.plugins.Google, 'groupsSnarfer',
    registry.Boolean(False, """Determines whether the groups snarfer is
    enabled. If so, URLs at groups.google.com will be snarfed and their
    group/title messaged to the channel."""))
conf.registerChannelValue(conf.supybot.plugins.Google, 'searchSnarfer',
    registry.Boolean(False, """Determines whether the search snarfer is
    enabled. If so, messages (even unaddressed ones) beginning with the word
    'google' will result in the first URL Google returns being sent to the
    channel."""))
conf.registerChannelValue(conf.supybot.plugins.Google, 'bold',
    registry.Boolean(True, """Determines whether results are bolded."""))
conf.registerChannelValue(conf.supybot.plugins.Google, 'maximumResults',
    registry.PositiveInteger(10, """Determines the maximum number of results
    returned from the google command."""))
conf.registerChannelValue(conf.supybot.plugins.Google, 'defaultLanguage',
    Language('lang_en', """Determines what default language is used in
    searches. If left empty, no specific language will be requested."""))
conf.registerChannelValue(conf.supybot.plugins.Google, 'safeSearch',
    registry.Boolean(True, "Determines whether safeSearch is on by default."))
conf.registerGlobalValue(conf.supybot.plugins.Google, 'licenseKey',
    LicenseKey('', """Sets the Google license key for using Google's Web
    Services API. This is necessary before you can do any searching with this
    module.""", private=True))

class Google(callbacks.PrivmsgCommandAndRegexp):
    threaded = True
    regexps = sets.Set(['googleSnarfer', 'googleGroups'])
    def __init__(self):
        callbacks.PrivmsgCommandAndRegexp.__init__(self)
        self.total = 0
        self.totalTime = 0
        self.last24hours = structures.queue()

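    # formatData() flattens a Google SOAP result set into one IRC line:
    # "Search took N seconds: Title: <url>; Title: <url>; ...".  Titles are
    # HTML-unescaped, optionally bolded, and the list is cut to 'max' results.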
    def formatData(self, data, bold=True, max=0):
        if isinstance(data, basestring):
            return data
        time = 'Search took %s seconds' % data.meta.searchTime
        results = []
        if max:
            data.results = data.results[:max]
        for result in data.results:
            title = utils.htmlToText(result.title.encode('utf-8'))
            url = result.URL
            if title:
                if bold:
                    title = ircutils.bold(title)
                results.append('%s: <%s>' % (title, url))
            else:
                results.append(url)
        if not results:
            return 'No matches found (%s)' % time
        else:
            return '%s: %s' % (time, '; '.join(results))

    def lucky(self, irc, msg, args):
        """<search>

        Does a google search, but only returns the first result.
        """
        data = search(self.log, args)
        if data.results:
            url = data.results[0].URL
            irc.reply(url)
        else:
            irc.reply('Google found nothing.')

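    # The google command: per-channel defaults (safeSearch, defaultLanguage)
    # are filled in first, then any --language/--restrict/--notsafe/--similar
    # options supplied with the command override them.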
    def google(self, irc, msg, args):
        """<search> [--{language,restrict}=<value>] [--{notsafe,similar}]

        Searches google.com for the given string. As many results as can fit
        are included. --language accepts a language abbreviation; --restrict
        restricts the results to certain classes of things; --similar tells
        Google not to filter similar results. --notsafe allows possibly
        work-unsafe results.
        """
        (optlist, rest) = getopt.getopt(args, '', ['language=', 'restrict=',
                                                   'notsafe', 'similar'])
        kwargs = {}
        if self.registryValue('safeSearch', channel=msg.args[0]):
            kwargs['safeSearch'] = 1
        lang = self.registryValue('defaultLanguage', channel=msg.args[0])
        if lang:
            kwargs['language'] = lang
        for (option, argument) in optlist:
            if option == '--notsafe':
                kwargs['safeSearch'] = False
            elif option == '--similar':
                kwargs['filter'] = False
            else:
                kwargs[option[2:]] = argument
        try:
            data = search(self.log, rest, **kwargs)
        except google.NoLicenseKey, e:
            irc.error('You must have a free Google web services license key '
                      'in order to use this command. You can get one at '
                      '<http://google.com/apis/>. Once you have one, you can '
                      'set it with the command '
                      '"config supybot.plugins.Google.licenseKey <key>".')
            return
        bold = self.registryValue('bold', msg.args[0])
        max = self.registryValue('maximumResults', msg.args[0])
        irc.reply(self.formatData(data, bold=bold, max=max))

    def metagoogle(self, irc, msg, args):
        """<search> [--{language,restrict}=<value>] [--{similar,notsafe}]

        Searches google and gives all the interesting meta information about
        the search. See the help for the google command for a detailed
        description of the parameters.
        """
        (optlist, rest) = getopt.getopt(args, '', ['language=', 'restrict=',
                                                   'notsafe', 'similar'])
        kwargs = {'language': 'lang_en', 'safeSearch': 1}
        for (option, argument) in optlist:
            if option == '--notsafe':
                kwargs['safeSearch'] = False
            elif option == '--similar':
                kwargs['filter'] = False
            else:
                kwargs[option[2:]] = argument
        data = search(self.log, rest, **kwargs)
        meta = data.meta
        categories = [d['fullViewableName'] for d in meta.directoryCategories]
        categories = [utils.dqrepr(s.replace('_', ' ')) for s in categories]
        if categories:
            categories = utils.commaAndify(categories)
        else:
            categories = ''
        s = 'Search for %r returned %s %s results in %s seconds.%s' % \
            (meta.searchQuery,
             meta.estimateIsExact and 'exactly' or 'approximately',
             meta.estimatedTotalResultsCount,
             meta.searchTime,
             categories and ' Categories include %s.' % categories)
        irc.reply(s)

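    # The page returned by google.doGetCachedPage() wraps a URL in a <code>
    # element; _cacheUrlRe extracts it so the command can reply with a link
    # to the cached copy.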
    _cacheUrlRe = re.compile('<code>([^<]+)</code>')
    def cache(self, irc, msg, args):
        """<url>

        Returns a link to the cached version of <url> if it is available.
        """
        url = privmsgs.getArgs(args)
        html = google.doGetCachedPage(url)
        m = self._cacheUrlRe.search(html)
        if m is not None:
            url = m.group(1)
            url = utils.htmlToText(url)
            irc.reply(url)
        else:
            irc.error('Google seems to have no cache for that site.')

    def fight(self, irc, msg, args):
        """<search string> <search string> [<search string> ...]

        Returns the results of each search, in order, from greatest number
        of results to least.
        """
        results = []
        for arg in args:
            data = search(self.log, [arg])
            results.append((data.meta.estimatedTotalResultsCount, arg))
        results.sort()
        results.reverse()
        s = ', '.join(['%r: %s' % (s, i) for (i, s) in results])
        irc.reply(s)

    def spell(self, irc, msg, args):
        """<word>

        Returns Google's spelling recommendation for <word>.
        """
        word = privmsgs.getArgs(args)
        result = google.doSpellingSuggestion(word)
        if result:
            irc.reply(result)
        else:
            irc.reply('No spelling suggestion made. This could mean that '
                      'the word you gave is spelled right; it could also '
                      'mean that its spelling was too whacked out even for '
                      'Google to figure out.')

    def info(self, irc, msg, args):
        """takes no arguments

        Returns interesting information about this Google module. Mostly
        useful for making sure you don't go over your 1000 requests/day limit.
        """
        recent = len(last24hours)
        irc.reply('This google module has been called %s total; '
                  '%s in the past 24 hours. '
                  'Google has spent %s seconds searching for me.' %
                  (utils.nItems('time', totalSearches),
                   utils.nItems('time', recent), totalTime))

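    # Snarfers: for methods named in 'regexps' above, the docstring is the
    # trigger regular expression.  googleSnarfer watches for messages that
    # start with 'google' and, when supybot.plugins.Google.searchSnarfer is
    # enabled for the channel, replies with the URL of the first result.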
    def googleSnarfer(self, irc, msg, match):
        r"^google\s+(.*)$"
        if not self.registryValue('searchSnarfer', msg.args[0]):
            return
        searchString = match.group(1)
        try:
            data = search(self.log, [searchString], safeSearch=1)
        except google.NoLicenseKey:
            return
        if data.results:
            url = data.results[0].URL
            irc.reply(url, prefixName=False)
    googleSnarfer = privmsgs.urlSnarfer(googleSnarfer)

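    # _ggThread/_ggGroup pull the Subject and Newsgroups out of the HTML view
    # of a Google Groups page; the Plain variants do the same for the
    # &output=gplain view, which serves the raw message headers as text.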
    _ggThread = re.compile(r'<br>Subject: ([^<]+)<br>')
    _ggPlainThread = re.compile(r'Subject: (.*)')
    _ggGroup = re.compile(r'Newsgroups: (?:<a[^>]+>)?([^<]+)(?:</a>)?')
    _ggPlainGroup = re.compile(r'Newsgroups: (.*)')
    def googleGroups(self, irc, msg, match):
        r"http://groups.google.com/[^\s]+"
        if not self.registryValue('groupsSnarfer', msg.args[0]):
            return
        request = urllib2.Request(match.group(0),
            headers={'User-agent': 'Mozilla/4.0 (compatible; MSIE 5.5; '
                                   'Windows NT 4.0)'})
        fd = urllib2.urlopen(request)
        text = fd.read()
        fd.close()
        mThread = None
        mGroup = None
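        # Three URL shapes are handled: framed links (containing &prev=/),
        # where we follow the page's "view the ... no-frame" link and parse
        # that page instead; &output=gplain links, which are plain text; and
        # ordinary HTML thread pages, which are parsed directly.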
        if '&prev=/' in match.group(0):
            path = re.search('view the <a href=([^>]+)>no', text)
            if path is None:
                return
            url = 'http://groups.google.com'
            request = urllib2.Request('%s%s' % (url, path.group(1)),
                headers={'User-agent': 'Mozilla/4.0 (compatible; MSIE 5.5; '
                                       'Windows NT 4.0)'})
            fd = urllib2.urlopen(request)
            text = fd.read()
            fd.close()
            mThread = self._ggThread.search(text)
            mGroup = self._ggGroup.search(text)
        elif '&output=gplain' in match.group(0):
            mThread = self._ggPlainThread.search(text)
            mGroup = self._ggPlainGroup.search(text)
        else:
            mThread = self._ggThread.search(text)
            mGroup = self._ggGroup.search(text)
        if mThread and mGroup:
            irc.reply('Google Groups: %s, %s' % (mGroup.group(1),
                      mThread.group(1)), prefixName=False)
        else:
            irc.errorPossibleBug('That doesn\'t appear to be a proper '
                                 'Google Groups page.')
    googleGroups = privmsgs.urlSnarfer(googleGroups)


Class = Google

# vim:set shiftwidth=4 tabstop=8 expandtab textwidth=78: