mirror of https://github.com/jlu5/PyLink.git synced 2024-11-30 14:49:28 +01:00

relay: remove use of sched in exportDB

We were using sched and threading together? How inefficient...
This commit is contained in:
James Lu 2015-12-29 11:13:50 -08:00
parent cc79871eaf
commit d7fdeed19d
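For context: sched.scheduler.run() already blocks and sleeps between queued events, so dedicating a thread to the scheduler while also re-entering the export event on every run duplicates what a plain loop in that thread would do. A minimal sketch of the two patterns, standalone and not the plugin's actual code (do_export() is a hypothetical placeholder for the real database dump):

import sched
import threading
import time

def do_export():
    print("exporting...")  # hypothetical stand-in for the real DB dump

# Old pattern: a sched event that re-queues itself, plus a thread whose only
# job is to run the scheduler.
scheduler = sched.scheduler()

def scheduled_export():
    scheduler.enter(30, 1, scheduled_export)  # queue the next run in 30s
    do_export()

scheduler.enter(30, 1, scheduled_export)
threading.Thread(target=scheduler.run, daemon=True).start()

# New pattern: the thread itself sleeps and loops; no scheduler involved.
def export_loop():
    while True:
        time.sleep(30)
        do_export()

threading.Thread(target=export_loop, daemon=True).start()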


@@ -2,8 +2,9 @@
 import sys
 import os
 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import pickle
-import sched
+import time
 import threading
 import string
 from collections import defaultdict
@@ -22,9 +23,6 @@ spawnlocks_servers = defaultdict(threading.RLock)
 savecache = ExpiringDict(max_len=5, max_age_seconds=10)
 killcache = ExpiringDict(max_len=5, max_age_seconds=10)
 
-exportdb_scheduler = None
-exportdb_event = None
-
 dbname = utils.getDatabaseName('pylinkrelay')
 
 ### INTERNAL FUNCTIONS
@@ -47,15 +45,11 @@ def main(irc=None):
     loadDB()
 
-    global exportdb_scheduler, exportdb_event
-    exportdb_scheduler = sched.scheduler()
-    exportdb_event = exportdb_scheduler.enter(30, 1, exportDB, argument=(True,))
-
     # Thread this because exportDB() queues itself as part of its
     # execution, in order to get a repeating loop.
-    thread = threading.Thread(target=exportdb_scheduler.run)
-    thread.daemon = True
-    thread.start()
+    exportdb_thread = threading.Thread(target=exportDB, args=(True,), name="PyLink Relay exportDB Loop")
+    exportdb_thread.daemon = True
+    exportdb_thread.start()
 
     if irc is not None:
         for ircobj in world.networkobjects.values():
@@ -76,10 +70,6 @@ def die(sourceirc):
     exportDB(reschedule=False)
 
-    # Stop all scheduled DB exports
-    global exportdb_scheduler, exportdb_event
-    exportdb_scheduler.cancel(exportdb_event)
-
 def normalizeNick(irc, netname, nick, separator=None, uid=''):
     """Creates a normalized nickname for the given nick suitable for
     introduction to a remote network (as a relay client)."""
@@ -153,13 +143,21 @@ def loadDB():
 def exportDB(reschedule=False):
     """Exports the relay database, optionally creating a loop to do this
     automatically."""
-    global exportdb_scheduler, exportdb_event
-    if reschedule and exportdb_scheduler:
-        exportdb_event = exportdb_scheduler.enter(30, 1, exportDB, argument=(True,))
-
-    log.debug("Relay: exporting links database to %s", dbname)
-    with open(dbname, 'wb') as f:
-        pickle.dump(db, f, protocol=4)
+
+    def dump():
+        log.debug("Relay: exporting links database to %s", dbname)
+        with open(dbname, 'wb') as f:
+            pickle.dump(db, f, protocol=4)
+
+    if reschedule:
+        while True:
+            # Sleep for 30 seconds between DB exports. Seems sort of
+            # arbitrary, but whatever.
+            time.sleep(30)
+            dump()
+    else:  # Rescheduling was disabled; just dump the DB once.
+        dump()
 
 def getPrefixModes(irc, remoteirc, channel, user, mlist=None):
     """
     Fetches all prefix modes for a user in a channel that are supported by the
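Pulled out of the diff, the new exportDB() control flow amounts to the standalone sketch below; db, dbname and the 30-second interval mirror the patch, while the log.debug call is dropped so the snippet has no PyLink dependencies:

import pickle
import threading
import time

db = {}                    # stand-in for the relay links database
dbname = 'pylinkrelay.db'  # stand-in for utils.getDatabaseName('pylinkrelay')

def exportDB(reschedule=False):
    """Dumps db to disk, optionally looping forever to do so periodically."""
    def dump():
        with open(dbname, 'wb') as f:
            pickle.dump(db, f, protocol=4)

    if reschedule:
        while True:
            time.sleep(30)  # export every 30 seconds
            dump()
    else:
        dump()

# As in main(): run the repeating export in a daemon thread so it never
# blocks the caller and dies with the process.
exportdb_thread = threading.Thread(target=exportDB, args=(True,),
                                   name="exportDB loop sketch")
exportdb_thread.daemon = True
exportdb_thread.start()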