3
0
mirror of https://github.com/jlu5/PyLink.git synced 2025-02-20 07:20:59 +01:00

relay: implement DB exporting using threading.Timer, similar to classes.Irc.schedulePing

Fixes #165 (exportDB threads not dying on unload)
This commit is contained in:
James Lu 2016-02-27 18:13:26 -08:00
parent d531201850
commit a740163cbe

View File

@ -22,6 +22,7 @@ spawnlocks = defaultdict(threading.RLock)
spawnlocks_servers = defaultdict(threading.RLock) spawnlocks_servers = defaultdict(threading.RLock)
savecache = ExpiringDict(max_len=5, max_age_seconds=10) savecache = ExpiringDict(max_len=5, max_age_seconds=10)
killcache = ExpiringDict(max_len=5, max_age_seconds=10) killcache = ExpiringDict(max_len=5, max_age_seconds=10)
exportdb_timer = None
dbname = utils.getDatabaseName('pylinkrelay') dbname = utils.getDatabaseName('pylinkrelay')
@ -43,32 +44,47 @@ def initializeAll(irc):
def main(irc=None): def main(irc=None):
"""Main function, called during plugin loading at start.""" """Main function, called during plugin loading at start."""
# Load the relay links database.
loadDB() loadDB()
# Thread this because exportDB() queues itself as part of its # Schedule periodic exports of the links database.
# execution, in order to get a repeating loop. scheduleExport(starting=True)
exportdb_thread = threading.Thread(target=exportDB, args=(True,), name="PyLink Relay exportDB Loop")
exportdb_thread.daemon = True
exportdb_thread.start()
if irc is not None: if irc is not None:
# irc is defined when the plugin is reloaded. Otherwise,
# it means that we've just started the server.
# Iterate over all known networks and initialize them.
for ircobj in world.networkobjects.values(): for ircobj in world.networkobjects.values():
initializeAll(ircobj) initializeAll(ircobj)
def die(sourceirc): def die(sourceirc):
"""Deinitialize PyLink Relay by quitting all relay clients and saving the """Deinitialize PyLink Relay by quitting all relay clients and saving the
relay DB.""" relay DB."""
# For every connected network:
for irc in world.networkobjects.values(): for irc in world.networkobjects.values():
# 1) Find all the relay clients and quit them.
for user in irc.users.copy(): for user in irc.users.copy():
if isRelayClient(irc, user): if isRelayClient(irc, user):
irc.proto.quit(user, "Relay plugin unloaded.") irc.proto.quit(user, "Relay plugin unloaded.")
# 2) SQUIT every relay subserver.
for server, sobj in irc.servers.copy().items(): for server, sobj in irc.servers.copy().items():
if hasattr(sobj, 'remote'): if hasattr(sobj, 'remote'):
irc.proto.squit(irc.sid, server, text="Relay plugin unloaded.") irc.proto.squit(irc.sid, server, text="Relay plugin unloaded.")
# 3) Clear our internal servers and users caches.
relayservers.clear() relayservers.clear()
relayusers.clear() relayusers.clear()
exportDB(reschedule=False) # 4) Export the relay links database.
exportDB()
# 5) Kill the scheduling for any other exports.
global exportdb_timer
if exportdb_timer:
log.debug("Relay: cancelling exportDB timer thread %s due to die()", threading.get_ident())
exportdb_timer.cancel()
def normalizeNick(irc, netname, nick, separator=None, uid=''): def normalizeNick(irc, netname, nick, separator=None, uid=''):
"""Creates a normalized nickname for the given nick suitable for """Creates a normalized nickname for the given nick suitable for
@ -140,23 +156,28 @@ def loadDB():
", creating a new one in memory...", dbname) ", creating a new one in memory...", dbname)
db = {} db = {}
def exportDB(reschedule=False): def exportDB():
"""Exports the relay database, optionally creating a loop to do this """Exports the relay database."""
automatically."""
def dump(): log.debug("Relay: exporting links database to %s", dbname)
log.debug("Relay: exporting links database to %s", dbname) with open(dbname, 'wb') as f:
with open(dbname, 'wb') as f: pickle.dump(db, f, protocol=4)
pickle.dump(db, f, protocol=4)
if reschedule: def scheduleExport(starting=False):
while True: """
# Sleep for 30 seconds between DB exports. Seems sort of Schedules exporting of the relay database in a repeated loop.
# arbitrary, but whatever. """
time.sleep(30) global exportdb_timer
dump()
else: # Rescheduling was disabled; just dump the DB once. if not starting:
dump() # Export the database, unless this is being called right
# after start (i.e. the DB has just been loaded).
exportDB()
# TODO: possibly make delay between exports configurable
exportdb_timer = threading.Timer(30, scheduleExport)
exportdb_timer.name = 'PyLink Relay exportDB Loop'
exportdb_timer.start()
def getPrefixModes(irc, remoteirc, channel, user, mlist=None): def getPrefixModes(irc, remoteirc, channel, user, mlist=None):
""" """